id (string, length 7 to 117) | author (string, 6 classes) | sha (null) | created_at (unknown) | last_modified (null) | disabled (null) | downloads (int64, 0 to 18.6M) | downloads_all_time (null) | gated (bool, 1 class) | gguf (null) | inference (null) | likes (int64, 0 to 4.77k) | library_name (string, 36 classes) | tags (sequence, length 1 to 430) | pipeline_tag (string, 32 classes) | mask_token (null) | model_index (null) | trending_score (int64, 0 to 132) | architectures (sequence, length 1 to 5, nullable) | bos_token_id (int64, -1 to 256k, nullable) | eos_token_id (int64, -1 to 256k, nullable) | hidden_act (string, 15 classes) | hidden_size (int64, 1 to 20.5k, nullable) | initializer_range (float64, 0 to 1, nullable) | intermediate_size (int64, 1 to 98.3k, nullable) | max_position_embeddings (int64, 8 to 1.05M, nullable) | model_type (string, 530 classes) | num_attention_heads (int64, 1 to 5k, nullable) | num_hidden_layers (int64, -1 to 8.93k, nullable) | num_key_value_heads (int64, 1 to 160, nullable) | rms_norm_eps (float64, 0 to 7, nullable) | rope_theta (float64, 1k to 1,000B, nullable) | sliding_window (int64, 0 to 262k, nullable) | tie_word_embeddings (bool, 2 classes) | torch_dtype (string, 8 classes) | transformers_version (string, 207 classes) | use_cache (bool, 2 classes) | vocab_size (int64, -1 to 5.03M, nullable) | attention_bias (bool, 2 classes) | attention_dropout (float64, 0 to 0.5, nullable) | head_dim (int64, 2 to 256, nullable) | mlp_bias (bool, 2 classes) | pretraining_tp (int64, 0 to 8, nullable) | rope_scaling (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
schuler/experimental-JP47E01 | null | null | "2024-11-11T03:48:04Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"kphi3",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"KPhi3ForCausalLM"
] | 1 | 2 | silu | 512 | 0.02 | 2,048 | 81 | kphi3 | 16 | 2 | 16 | 0.00001 | 10,000 | null | false | float32 | 4.44.2 | true | 22,000 | null | 0 | null | null | null | null |
artificialguybr/QWEN-2.5-0.5B-Synthia-I | null | null | "2024-11-11T04:36:42Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"generated_from_trainer",
"instruction-tuning",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-0.5B",
"base_model:finetune:Qwen/Qwen2.5-0.5B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | null | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.45.0.dev0 | false | 151,936 | null | 0 | null | null | null | null |
KaKee/llama-2-7b-chat_own_build_dataset_7th_stereo_version_1_subset_epoch2 | null | null | "2024-11-11T04:49:47Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
KaKee/llama-2-7b-chat_own_build_dataset_7th_full_refine_version_1_subset_epoch2 | null | null | "2024-11-11T04:50:28Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
KaKee/llama-2-7b-chat_own_build_dataset_7th_stereo_version_1_2_subset_epoch2 | null | null | "2024-11-11T04:51:39Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
KaKee/llama-2-7b-chat_own_build_dataset_7th_full_refine_version_1_2_subset_epoch2 | null | null | "2024-11-11T04:55:44Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
Aurora-Gem/Opt_lora16_qwen2.5_7B_model_25k-1111 | null | null | "2024-11-11T04:59:45Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 4,096 | qwen2 | 28 | 28 | 4 | 0.000001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
KaKee/llama-2-7b-chat_CrowS_Pairs_original_epoch2 | null | null | "2024-11-11T05:09:26Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
dasiy/M2_LLAMA2 | null | null | "2024-11-11T05:12:35Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
yejinkim/forget05_expert_epoch10 | null | null | "2024-11-11T05:24:28Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"PhiForCausalLM"
] | null | null | gelu_new | 2,048 | 0.02 | 8,192 | 2,048 | phi | 32 | 24 | 32 | null | 10,000 | null | false | float32 | 4.46.0.dev0 | false | 51,200 | null | 0 | null | null | null | null |
ADHIZ/omni_mani | null | null | "2024-11-11T05:29:42Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
skchoi1015/Llama-3-Open-Ko-8B-Instruct-skchoi | null | null | "2024-11-11T05:31:37Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"unsloth",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
rawsh/mirrorqwen2.5-0.5b-SimPO-3 | null | null | "2024-11-11T05:47:16Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"cpo",
"unsloth",
"arxiv:2401.08417",
"base_model:rawsh/mirrorqwen2.5-0.5b-SimPO-2",
"base_model:finetune:rawsh/mirrorqwen2.5-0.5b-SimPO-2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | false | 151,936 | null | 0 | null | null | null | null |
huihui-ai/SmolLM2-1.7B-Instruct-abliterated | null | null | "2024-11-11T06:42:17Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"Safetensors",
"conversational",
"text-generation-inference",
"abliterated",
"uncensored",
"en",
"base_model:HuggingFaceTB/SmolLM2-1.7B-Instruct",
"base_model:finetune:HuggingFaceTB/SmolLM2-1.7B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 8,192 | llama | 32 | 24 | 32 | 0.00001 | 130,000 | null | true | bfloat16 | 4.42.3 | true | 49,152 | false | 0 | null | false | 1 | null |
schuler/experimental-JP47D03 | null | null | "2024-11-11T07:04:06Z" | null | null | 22 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"kphi3",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"KPhi3ForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 512 | kphi3 | 32 | 2 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.44.2 | true | 32,064 | null | 0 | null | null | null | null |
NandaArya/Mistralv03-7B-Finetuned | null | null | "2024-11-11T07:10:55Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"unsloth",
"text-generation-inference",
"id",
"ar",
"dataset:emhaihsan/quran-indonesia-tafseer-translation",
"base_model:unsloth/mistral-7b-v0.3",
"base_model:finetune:unsloth/mistral-7b-v0.3",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | true | 32,768 | null | 0 | 128 | null | null | null |
visdata/llama_3c | null | null | "2024-11-11T07:12:25Z" | null | null | 266 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 1,408 | 0.02 | 4,340 | 2,048 | llama | 22 | 24 | 22 | 0.00001 | 10,000 | null | false | bfloat16 | 4.45.1 | false | 50,257 | false | 0 | 64 | false | 1 | null |
zaanind/gpt2_finetune_films | null | null | "2024-11-11T07:15:02Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.1 | true | 50,257 | null | null | null | null | null | null |
visdata/llama_3c2 | null | null | "2024-11-11T07:20:14Z" | null | null | 267 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 50,256 | 50,256 | silu | 1,408 | 0.02 | 4,340 | 2,048 | llama | 22 | 24 | 22 | 0.00001 | 10,000 | null | false | bfloat16 | 4.45.1 | false | 50,257 | false | 0 | 64 | false | 1 | null |
VortexKnight7/Video-Summ-Qwen | null | null | "2024-11-11T07:23:36Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"safetensors",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"conversational",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 131,072 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
teka38/Qwen2.5_0.5_DocMental-_v2.5a | null | null | "2024-11-11T07:30:34Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.44.2 | true | 151,936 | null | 0 | null | null | null | null |
thanhtunguet/vietnamese-administrative-units-model | null | null | "2024-11-11T07:37:38Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:openai-community/gpt2-medium",
"base_model:finetune:openai-community/gpt2-medium",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
Rich-J/subnet29_C0_Nov_10 | null | null | "2024-11-11T07:44:02Z" | null | null | 80 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 18,752 | 4,096 | llama | 32 | 30 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
Rich-J/subnet29_C0_Nov_11 | null | null | "2024-11-11T07:44:23Z" | null | null | 36 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
ihughes15234/phi35_tictactoe_dpo4epoch_v5 | null | null | "2024-11-11T07:47:56Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi35_tictactoe_dpo2epoch_v5",
"base_model:finetune:ihughes15234/phi35_tictactoe_dpo2epoch_v5",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
gdshaji/gd-sn11-mistralai10k | null | null | "2024-11-11T07:52:03Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
schuler/experimental-JP47D01 | null | null | "2024-11-11T07:54:52Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"kphi3",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"KPhi3ForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 16,384 | 512 | kphi3 | 64 | 32 | 64 | 0.00001 | 10,000 | null | false | float32 | 4.44.2 | true | 32,064 | null | 0 | null | null | null | null |
schuler/experimental-JP47D02 | null | null | "2024-11-11T07:56:02Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"kphi3",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"KPhi3ForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 16,384 | 512 | kphi3 | 64 | 32 | 64 | 0.00001 | 10,000 | null | false | float32 | 4.44.2 | true | 32,010 | null | 0 | null | null | null | null |
allknowingroger/QwenSlerp4-7B | null | null | "2024-11-11T08:03:12Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:merge:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:huihui-ai/Qwen2.5-7B-Instruct-abliterated-v2",
"base_model:merge:huihui-ai/Qwen2.5-7B-Instruct-abliterated-v2",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
allknowingroger/QwenSlerp5-7B | null | null | "2024-11-11T08:17:38Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:merge:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:allknowingroger/Qwenslerp2-7B",
"base_model:merge:allknowingroger/Qwenslerp2-7B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
broalantap/Llama3-Compressor-Adapter-4-60000steps | null | null | "2024-11-11T08:24:13Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | bfloat16 | 4.41.2 | true | 50,321 | null | null | null | null | null | null |
allknowingroger/QwenSlerp6-7B | null | null | "2024-11-11T08:43:32Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:merge:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:allknowingroger/Qwenslerp3-7B",
"base_model:merge:allknowingroger/Qwenslerp3-7B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
slobers/spinkle | null | null | "2024-11-11T08:47:35Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.41.2 | false | 32,064 | false | 0 | null | null | null | null |
Darshan03/Edu-Model-v7 | null | null | "2024-11-11T10:03:25Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | false | 151,936 | null | 0 | null | null | 1 | null |
ClaudioItaly/Coder | null | null | "2024-11-11T10:06:14Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Qwen/Qwen2.5-Coder-7B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-7B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 152,064 | null | 0 | null | null | null | null |
Darshan03/Qwen2-7B-Instruct-FineTuning | null | null | "2024-11-11T10:07:53Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"sft",
"conversational",
"base_model:Qwen/Qwen2.5-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-3B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | false | 151,936 | null | 0 | null | null | 1 | null |
ClaudioItaly/CoderQ | null | null | "2024-11-11T10:18:50Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2306.01708",
"base_model:Qwen/Qwen2.5-Coder-7B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-7B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | true | 151,665 | null | 0 | null | null | null | null |
NESPED-GEN/StableCode-text2SQL-v0 | null | null | "2024-11-11T10:30:30Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"stablelm",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"StableLmForCausalLM"
] | 0 | 0 | silu | 2,560 | 0.02 | 6,912 | 16,384 | stablelm | 32 | 32 | 32 | null | 1,000,000 | null | false | float16 | 4.44.2 | false | 50,304 | null | 0 | null | null | null | null |
Kerneld/yi-ko-6b-text2sql | null | null | "2024-11-11T10:31:31Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.40.1 | true | 78,464 | false | 0 | null | null | 1 | null |
Darshan03/Edu-Model-v9 | null | null | "2024-11-11T10:33:51Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | false | 151,936 | null | 0 | null | null | 1 | null |
MrRobotoAI/Loki-.Epic_Fiction.-8b | null | null | "2024-11-11T10:40:11Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"mergekit",
"merge",
"arxiv:2403.19522",
"base_model:Azazelle/L3-Daybreak-8b-lora",
"base_model:merge:Azazelle/L3-Daybreak-8b-lora",
"base_model:Azazelle/Llama-3-8B-Abomination-LORA",
"base_model:merge:Azazelle/Llama-3-8B-Abomination-LORA",
"base_model:Azazelle/Llama-3-LimaRP-Instruct-LoRA-8B",
"base_model:merge:Azazelle/Llama-3-LimaRP-Instruct-LoRA-8B",
"base_model:Azazelle/Llama-3-LongStory-LORA",
"base_model:merge:Azazelle/Llama-3-LongStory-LORA",
"base_model:Azazelle/Nimue-8B",
"base_model:merge:Azazelle/Nimue-8B",
"base_model:Azazelle/Smarts_Llama3",
"base_model:merge:Azazelle/Smarts_Llama3",
"base_model:Blackroot/Llama-3-LongStory-LORA",
"base_model:merge:Blackroot/Llama-3-LongStory-LORA",
"base_model:Blackroot/Llama3-RP-Lora",
"base_model:merge:Blackroot/Llama3-RP-Lora",
"base_model:MrRobotoAI/Epic_Fiction-8b",
"base_model:merge:MrRobotoAI/Epic_Fiction-8b",
"base_model:ResplendentAI/Aura_Llama3",
"base_model:merge:ResplendentAI/Aura_Llama3",
"base_model:ResplendentAI/BlueMoon_Llama3",
"base_model:merge:ResplendentAI/BlueMoon_Llama3",
"base_model:ResplendentAI/Luna_Llama3",
"base_model:merge:ResplendentAI/Luna_Llama3",
"base_model:ResplendentAI/NoWarning_Llama3",
"base_model:merge:ResplendentAI/NoWarning_Llama3",
"base_model:ResplendentAI/Theory_of_Mind_Llama3",
"base_model:merge:ResplendentAI/Theory_of_Mind_Llama3",
"base_model:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:merge:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:nicce/story-mixtral-8x7b-lora",
"base_model:merge:nicce/story-mixtral-8x7b-lora",
"base_model:nothingiisreal/llama3-8B-DWP-lora",
"base_model:merge:nothingiisreal/llama3-8B-DWP-lora",
"base_model:surya-narayanan/college_chemistry",
"base_model:merge:surya-narayanan/college_chemistry",
"base_model:surya-narayanan/college_computer_science",
"base_model:merge:surya-narayanan/college_computer_science",
"base_model:surya-narayanan/college_physics",
"base_model:merge:surya-narayanan/college_physics",
"base_model:surya-narayanan/conceptual_physics",
"base_model:merge:surya-narayanan/conceptual_physics",
"base_model:surya-narayanan/electrical_engineering",
"base_model:merge:surya-narayanan/electrical_engineering",
"base_model:surya-narayanan/formal_logic",
"base_model:merge:surya-narayanan/formal_logic",
"base_model:surya-narayanan/high_school_european_history",
"base_model:merge:surya-narayanan/high_school_european_history",
"base_model:surya-narayanan/human_sexuality",
"base_model:merge:surya-narayanan/human_sexuality",
"base_model:surya-narayanan/philosophy",
"base_model:merge:surya-narayanan/philosophy",
"base_model:surya-narayanan/philosophy_100",
"base_model:merge:surya-narayanan/philosophy_100",
"base_model:surya-narayanan/philosophy_non_masked",
"base_model:merge:surya-narayanan/philosophy_non_masked",
"base_model:surya-narayanan/physics_non_masked",
"base_model:merge:surya-narayanan/physics_non_masked",
"base_model:surya-narayanan/psychology",
"base_model:merge:surya-narayanan/psychology",
"base_model:surya-narayanan/psychology_non_masked",
"base_model:merge:surya-narayanan/psychology_non_masked",
"base_model:surya-narayanan/sociology",
"base_model:merge:surya-narayanan/sociology",
"base_model:surya-narayanan/world_religions",
"base_model:merge:surya-narayanan/world_religions",
"base_model:vincentyandex/lora_llama3_chunked_novel_bs128",
"base_model:merge:vincentyandex/lora_llama3_chunked_novel_bs128",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 1,048,576 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | false | 128,256 | false | 0 | 128 | false | 1 | null |
NESPED-GEN/StableCode-text2SQL-indentacao | null | null | "2024-11-11T10:42:30Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"stablelm",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"StableLmForCausalLM"
] | 0 | 0 | silu | 2,560 | 0.02 | 6,912 | 16,384 | stablelm | 32 | 32 | 32 | null | 1,000,000 | null | false | float16 | 4.44.2 | false | 50,304 | null | 0 | null | null | null | null |
Darshan03/Edu-Model-v10 | null | null | "2024-11-11T10:43:53Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | false | 151,936 | null | 0 | null | null | 1 | null |
teka38/Somil_DR_Mental1.0 | null | null | "2024-11-11T10:49:49Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 8,192 | llama | 15 | 32 | 5 | 0.00001 | 100,000 | null | true | float32 | 4.44.2 | true | 49,152 | false | 0 | null | false | 1 | null |
NESPED-GEN/StableCode-text2SQL-alias | null | null | "2024-11-11T10:56:11Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"stablelm",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"StableLmForCausalLM"
] | 0 | 0 | silu | 2,560 | 0.02 | 6,912 | 16,384 | stablelm | 32 | 32 | 32 | null | 1,000,000 | null | false | float16 | 4.44.2 | false | 50,304 | null | 0 | null | null | null | null |
kanishka/opt-babylm2-rewritten-clean-spacy-32k-earlystop-40epochs_seed-42_3e-4 | null | null | "2024-11-11T11:15:15Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"opt",
"text-generation",
"generated_from_trainer",
"dataset:kanishka/babylm2-rewritten-clean-spacy",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"OPTForCausalLM"
] | 1 | 1 | null | 768 | null | null | 256 | opt | 12 | 12 | null | null | null | null | null | float32 | 4.45.1 | true | 32,768 | null | 0 | null | null | null | null |
teka38/Somil_DR_Mental1.5 | null | null | "2024-11-11T11:20:12Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 8,192 | llama | 15 | 32 | 5 | 0.00001 | 100,000 | null | true | float32 | 4.44.2 | true | 49,152 | false | 0 | null | false | 1 | null |
Triangle104/Mistral_Sunair-V1.0 | null | null | "2024-11-11T11:21:09Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2311.03099",
"arxiv:2306.01708",
"base_model:anthracite-org/magnum-v4-12b",
"base_model:merge:anthracite-org/magnum-v4-12b",
"base_model:nbeerbower/Lyra4-Gutenberg2-12B",
"base_model:merge:nbeerbower/Lyra4-Gutenberg2-12B",
"base_model:nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B-v2",
"base_model:merge:nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B-v2",
"base_model:unsloth/Mistral-Nemo-Instruct-2407",
"base_model:merge:unsloth/Mistral-Nemo-Instruct-2407",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 14,336 | 1,024,000 | mistral | 32 | 40 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 131,072 | null | 0 | 128 | null | null | null |
second-state/Qwen2.5-Coder-32B-Instruct-GGUF | null | null | "2024-11-11T11:23:48Z" | null | null | 61 | null | null | null | null | 0 | transformers | [
"transformers",
"gguf",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-32B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
gaianet/Qwen2.5-Coder-32B-Instruct-GGUF | null | null | "2024-11-11T11:24:06Z" | null | null | 221 | null | null | null | null | 0 | transformers | [
"transformers",
"gguf",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-32B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
XeroCodes/aether-30m | null | null | "2024-11-11T11:55:10Z" | null | null | 0 | null | null | null | null | 0 | peft | [
"peft",
"safetensors",
"llama",
"text-generation",
"en",
"license:apache-2.0",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 2 | 3 | silu | 256 | 0.02 | 786 | 2,048 | llama | 8 | 16 | 8 | 0.000001 | 10,000 | null | false | float32 | 4.47.0.dev0 | false | 32,001 | false | 0 | 32 | false | 1 | null |
Aurora-Gem/Opt_lora16_qwen2.5_7B_model_25k-1111-2 | null | null | "2024-11-11T12:12:37Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"unsloth",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 4,096 | qwen2 | 28 | 28 | 4 | 0.000001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
ihughes15234/phi35_tictactoe_dpo10epoch_v6 | null | null | "2024-11-11T12:29:06Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi_3_5_mini_tictactoe1200",
"base_model:finetune:ihughes15234/phi_3_5_mini_tictactoe1200",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
NESPED-GEN/StableCode-text2SQL-alias-indentacao | null | null | "2024-11-11T12:31:32Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"stablelm",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"StableLmForCausalLM"
] | 0 | 0 | silu | 2,560 | 0.02 | 6,912 | 16,384 | stablelm | 32 | 32 | 32 | null | 1,000,000 | null | false | float16 | 4.44.2 | false | 50,304 | null | 0 | null | null | null | null |
chungssu/kogpt2-inst-newstitle | Heewon Jeon (madjakarta@gmail.com) | null | "2024-11-11T12:34:53Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 0 | 1 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 51,200 | null | null | null | null | null | null |
kadeyshvili/my_implementation_llama | null | null | "2024-11-11T12:38:01Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"my_implementation_llama_new_new7",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMAModel"
] | null | null | null | null | null | null | null | my_implementation_llama_new_new7 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | 32,000 | null | null | null | null | null | null |
RyanYr/self-reflect_mistralSmallit_mg_dpo_iter2 | null | null | "2024-11-11T12:43:44Z" | null | null | 39 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:RyanYr/self-reflect_mistralSmallit_mg_dpo_iter1",
"base_model:finetune:RyanYr/self-reflect_mistralSmallit_mg_dpo_iter1",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 32,768 | mistral | 48 | 56 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.2 | false | 32,769 | null | 0 | 128 | null | null | null |
marcomaccarini/FLARE | null | null | "2024-11-11T13:06:01Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.42.3 | true | 128,256 | false | 0 | null | false | 1 | null |
Mr-Vicky-01/new-qa | null | null | "2024-11-11T13:10:12Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | false | 50,259 | null | null | null | null | null | null |
gdshaji/gd-sn11-mistralai10k-upd | null | null | "2024-11-11T13:34:22Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
AndreyRzhaksinskiy/CDS-CodeLlama-7b-20241112_pretrain | null | null | "2024-11-11T13:47:16Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.0 | true | 32,016 | false | 0 | 128 | false | 1 | null |
kadeyshvili/my_implementation_llama_new_new5 | null | null | "2024-11-11T13:59:06Z" | null | null | 26 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"my_implementation_llama_new_new5",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMAModel"
] | null | null | null | null | null | null | null | my_implementation_llama_new_new5 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | 32,000 | null | null | null | null | null | null |
KaKee/llama-2-7b-chat_own_build_dataset_7th_stereo_version_1_2_3_4_5_6_subset_epoch2 | null | null | "2024-11-11T14:09:06Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
VortexKnight7/Video-Summ-Text | null | null | "2024-11-11T14:17:06Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 131,072 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
Orion-zhen/OpenCoder-8B-Instruct-AWQ | null | null | "2024-11-11T14:32:13Z" | null | null | 12 | null | null | null | null | 0 | null | [
"safetensors",
"llama",
"text-generation",
"conversational",
"zh",
"en",
"base_model:infly/OpenCoder-8B-Instruct",
"base_model:quantized:infly/OpenCoder-8B-Instruct",
"license:gpl-3.0",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.44.2 | true | 96,640 | false | 0 | null | false | 1 | null |
tanquangduong/Qwen2.5-3B-DPO-TinyStories | null | null | "2024-11-11T14:43:32Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"dpo",
"en",
"base_model:unsloth/Qwen2.5-3B",
"base_model:finetune:unsloth/Qwen2.5-3B",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
AndreyRzhaksinskiy/CDS-CodeLlama-7b-20241112 | null | null | "2024-11-11T14:48:48Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.0 | true | 32,016 | false | 0 | 128 | false | 1 | null |
kadeyshvili/my_implementation_llama_new_new6 | null | null | "2024-11-11T14:54:03Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"safetensors",
"my_implementation_llama_new_new6",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMAModel"
] | null | null | null | null | null | null | null | my_implementation_llama_new_new6 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | 32,000 | null | null | null | null | null | null |
kadeyshvili/my_implementation_llama_new_new7 | null | null | "2024-11-11T15:07:04Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"my_implementation_llama_new_new7",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMAModel"
] | null | null | null | null | null | null | null | my_implementation_llama_new_new7 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | 32,000 | null | null | null | null | null | null |
MrRobotoAI/Freyja-7b-v1 | null | null | "2024-11-11T15:37:13Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2403.19522",
"base_model:Azazelle/L3-Daybreak-8b-lora",
"base_model:merge:Azazelle/L3-Daybreak-8b-lora",
"base_model:Azazelle/Llama-3-8B-Abomination-LORA",
"base_model:merge:Azazelle/Llama-3-8B-Abomination-LORA",
"base_model:Azazelle/Llama-3-LimaRP-Instruct-LoRA-8B",
"base_model:merge:Azazelle/Llama-3-LimaRP-Instruct-LoRA-8B",
"base_model:Azazelle/Llama-3-LongStory-LORA",
"base_model:merge:Azazelle/Llama-3-LongStory-LORA",
"base_model:Azazelle/Nimue-8B",
"base_model:merge:Azazelle/Nimue-8B",
"base_model:Azazelle/Smarts_Llama3",
"base_model:merge:Azazelle/Smarts_Llama3",
"base_model:Blackroot/Llama-3-LongStory-LORA",
"base_model:merge:Blackroot/Llama-3-LongStory-LORA",
"base_model:Blackroot/Llama3-RP-Lora",
"base_model:merge:Blackroot/Llama3-RP-Lora",
"base_model:MrRobotoAI/Hathor-v4.7",
"base_model:merge:MrRobotoAI/Hathor-v4.7",
"base_model:ResplendentAI/Aura_Llama3",
"base_model:merge:ResplendentAI/Aura_Llama3",
"base_model:ResplendentAI/BlueMoon_Llama3",
"base_model:merge:ResplendentAI/BlueMoon_Llama3",
"base_model:ResplendentAI/Luna_Llama3",
"base_model:merge:ResplendentAI/Luna_Llama3",
"base_model:ResplendentAI/NoWarning_Llama3",
"base_model:merge:ResplendentAI/NoWarning_Llama3",
"base_model:ResplendentAI/Theory_of_Mind_Llama3",
"base_model:merge:ResplendentAI/Theory_of_Mind_Llama3",
"base_model:luvGPT/mistral-7b-uncensored",
"base_model:merge:luvGPT/mistral-7b-uncensored",
"base_model:nicce/story-mixtral-8x7b-lora",
"base_model:merge:nicce/story-mixtral-8x7b-lora",
"base_model:nothingiisreal/llama3-8B-DWP-lora",
"base_model:merge:nothingiisreal/llama3-8B-DWP-lora",
"base_model:surya-narayanan/college_chemistry",
"base_model:merge:surya-narayanan/college_chemistry",
"base_model:surya-narayanan/college_computer_science",
"base_model:merge:surya-narayanan/college_computer_science",
"base_model:surya-narayanan/college_physics",
"base_model:merge:surya-narayanan/college_physics",
"base_model:surya-narayanan/conceptual_physics",
"base_model:merge:surya-narayanan/conceptual_physics",
"base_model:surya-narayanan/electrical_engineering",
"base_model:merge:surya-narayanan/electrical_engineering",
"base_model:surya-narayanan/formal_logic",
"base_model:merge:surya-narayanan/formal_logic",
"base_model:surya-narayanan/high_school_european_history",
"base_model:merge:surya-narayanan/high_school_european_history",
"base_model:surya-narayanan/human_sexuality",
"base_model:merge:surya-narayanan/human_sexuality",
"base_model:surya-narayanan/philosophy",
"base_model:merge:surya-narayanan/philosophy",
"base_model:surya-narayanan/philosophy_100",
"base_model:merge:surya-narayanan/philosophy_100",
"base_model:surya-narayanan/philosophy_non_masked",
"base_model:merge:surya-narayanan/philosophy_non_masked",
"base_model:surya-narayanan/physics_non_masked",
"base_model:merge:surya-narayanan/physics_non_masked",
"base_model:surya-narayanan/psychology",
"base_model:merge:surya-narayanan/psychology",
"base_model:surya-narayanan/psychology_non_masked",
"base_model:merge:surya-narayanan/psychology_non_masked",
"base_model:surya-narayanan/sociology",
"base_model:merge:surya-narayanan/sociology",
"base_model:surya-narayanan/world_religions",
"base_model:merge:surya-narayanan/world_religions",
"base_model:vincentyandex/lora_llama3_chunked_novel_bs128",
"base_model:merge:vincentyandex/lora_llama3_chunked_novel_bs128",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | float16 | 4.46.2 | true | 32,000 | null | 0 | 128 | null | null | null |
CanerAI/gpt2-turkishReviews-ds | null | null | "2024-11-11T15:47:26Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"tf",
"gpt2",
"text-generation",
"generated_from_keras_callback",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | null | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
zelk12/MT1-Gen2-MA-gemma-2-RAv0.1t0.25MT4-9B | null | null | "2024-11-11T16:06:47Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT4-Gen1-gemma-2-9B",
"base_model:merge:zelk12/MT4-Gen1-gemma-2-9B",
"base_model:zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25",
"base_model:merge:zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
GitBag/reasoning_rebel_iter_2_1731046941_eta_1e1_lr_3e-7_1731294150 | null | null | "2024-11-11T16:08:32Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
bmconrad/kjv-gpt2 | null | null | "2024-11-11T16:11:44Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
avemio-digital/mistral7b_orpo_three_datasets_lora_1880 | null | null | "2024-11-11T16:21:20Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.44.2 | true | 32,832 | null | 0 | 128 | null | null | null |
NMSK/mistral_7b_finetuned_sysmlv2_test | null | null | "2024-11-11T16:21:33Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | float32 | 4.47.0.dev0 | false | 32,000 | null | 0 | 128 | null | null | null |
GitBag/reasoning_rebel_iter_2_1731046941_eta_1e5_lr_3e-7_1731331966 | null | null | "2024-11-11T16:21:57Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
Aarushhh/untrained-SEWY-127M | null | null | "2024-11-11T16:24:37Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 1 | 2 | relu2 | 384 | 0.02 | 1,536 | 8,192 | gemma2 | 12 | 24 | 2 | 0.000001 | 10,000 | 4,096 | null | float32 | 4.45.1 | true | 49,152 | false | 0 | 256 | null | null | null |
GitBag/reasoning_rebel_iter_2_1731046941_eta_1e4_lr_3e-7_1731326977 | null | null | "2024-11-11T16:27:11Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
GitBag/reasoning_rebel_iter_2_1731046941_eta_1e6_lr_3e-7_1731336958 | null | null | "2024-11-11T16:28:47Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
NMSK/mistral_7b_finetuned_sysmlv2_test_ | null | null | "2024-11-11T16:32:29Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | float32 | 4.47.0.dev0 | false | 32,000 | null | 0 | 128 | null | null | null |
GitBag/reasoning_rebel_iter_2_1731046941_eta_1e3_lr_3e-7_1731322045 | null | null | "2024-11-11T16:33:39Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
zelk12/MT1-Gen2-MM-gemma-2-RAv0.1t0.25Dv1-9B | null | null | "2024-11-11T16:35:18Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:sam-paech/Delirium-v1",
"base_model:merge:sam-paech/Delirium-v1",
"base_model:zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25",
"base_model:merge:zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
GitBag/reasoning_rebel_iter_2_1731046941_eta_1e2_lr_3e-7_1731317043 | null | null | "2024-11-11T16:38:56Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
zelk12/MT1-Gen2-BI-gemma-2-9B | null | null | "2024-11-11T16:52:46Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:zelk12/MT1-Gen2-BB-gemma-2-MT1MT4-9B",
"base_model:merge:zelk12/MT1-Gen2-BB-gemma-2-MT1MT4-9B",
"base_model:zelk12/MT1-Gen2-IF-gemma-2-MT1Qv1-9B",
"base_model:merge:zelk12/MT1-Gen2-IF-gemma-2-MT1Qv1-9B",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
odunola/immediate | null | null | "2024-11-11T16:56:40Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"moondream1",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Moondream"
] | null | null | null | null | null | null | null | moondream1 | null | null | null | null | null | null | null | float16 | 4.44.2 | null | null | null | null | null | null | null | null |
xxhe/esci-dpo-mistral-7b-instruct-iter-0 | null | null | "2024-11-11T17:09:45Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.1 | true | 32,768 | null | 0 | 128 | null | null | null |
rudradhar/llama-finetuning-merged | null | null | "2024-11-11T17:10:56Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
xxhe/esci-dpo-mistral-7b-instruct-iter-1 | null | null | "2024-11-11T17:16:30Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.1 | true | 32,768 | null | 0 | 128 | null | null | null |
emozilla/llama2-1.2b-init-6 | null | null | "2024-11-11T17:33:41Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 2,048 | llama | 16 | 16 | 16 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.1 | true | 32,000 | false | 0 | 128 | false | 1 | null |
TmBoris/custom-llama2 | null | null | "2024-11-11T17:41:58Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMA_model"
] | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | float32 | 4.46.1 | null | 32,000 | null | null | null | null | null | null |
Just-ln-Case/super_linear_mbda | null | null | "2024-11-11T17:42:50Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"super_linear_mbda",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"SuperLinearMBDA"
] | null | null | null | null | null | null | null | super_linear_mbda | null | null | null | null | null | null | null | float32 | 4.44.2 | null | null | null | null | null | null | null | null |
blurridge/KellyAI-FT-Mistral-7B-Instruct-v0.3 | null | null | "2024-11-11T17:52:41Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float32 | 4.44.0.dev0 | false | 32,768 | null | 0 | 128 | null | null | null |
toastloaf/autotrain-gpt2-finetune-crab | null | null | "2024-11-11T17:55:58Z" | null | null | 15 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"gpt2",
"text-generation",
"autotrain",
"text-generation-inference",
"conversational",
"dataset:THU-KEG/Crab-SFT",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
tsadoq/corrupted_gemma_test | null | null | "2024-11-11T17:58:02Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 2,048 | 0.02 | 16,384 | 8,192 | gemma | 8 | 18 | 1 | 0.000001 | 10,000 | null | null | bfloat16 | 4.45.2 | true | 256,000 | false | 0 | 256 | null | null | null |
MarsupialAI/Stellar-Odyssey-12b-adventure-v0.0_EXL2_6bpw_H8 | null | null | "2024-11-11T18:26:39Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"6-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 14,336 | 1,024,000 | mistral | 32 | 40 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 131,072 | null | 0 | 128 | null | null | null |
BenevolenceMessiah/Qwen2.5-72B-2x-Instruct-TIES-v1.0 | null | null | "2024-11-11T18:27:36Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2306.01708",
"base_model:Qwen/Qwen2.5-72B",
"base_model:merge:Qwen/Qwen2.5-72B",
"base_model:abacusai/Dracarys2-72B-Instruct",
"base_model:merge:abacusai/Dracarys2-72B-Instruct",
"base_model:rombodawg/Rombos-LLM-V2.5-Qwen-72b",
"base_model:merge:rombodawg/Rombos-LLM-V2.5-Qwen-72b",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 8,192 | 0.02 | 29,568 | 131,072 | qwen2 | 64 | 80 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 151,665 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-32B-8bit | null | null | "2024-11-11T18:29:04Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B",
"base_model:quantized:Qwen/Qwen2.5-Coder-32B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.00001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-32B-bf16 | null | null | "2024-11-11T18:29:13Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B",
"base_model:finetune:Qwen/Qwen2.5-Coder-32B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.00001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
mlx-community/Qwen2.5-Coder-14B-Instruct-bf16 | null | null | "2024-11-11T18:29:32Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |