id (string, length 7 to 117) | author (string, 6 distinct values) | sha (null) | created_at (timestamp) | last_modified (null) | disabled (null) | downloads (int64, 0 to 18.6M) | downloads_all_time (null) | gated (bool, 1 class) | gguf (null) | inference (null) | likes (int64, 0 to 4.77k) | library_name (string, 36 distinct values) | tags (sequence, 1 to 430 items) | pipeline_tag (string, 32 distinct values) | mask_token (null) | model_index (null) | trending_score (int64, 0 to 132) | architectures (sequence, 1 to 5 items, nullable) | bos_token_id (int64, -1 to 256k, nullable) | eos_token_id (int64, -1 to 256k, nullable) | hidden_act (string, 15 distinct values) | hidden_size (int64, 1 to 20.5k, nullable) | initializer_range (float64, 0 to 1, nullable) | intermediate_size (int64, 1 to 98.3k, nullable) | max_position_embeddings (int64, 8 to 1.05M, nullable) | model_type (string, 530 distinct values) | num_attention_heads (int64, 1 to 5k, nullable) | num_hidden_layers (int64, -1 to 8.93k, nullable) | num_key_value_heads (int64, 1 to 160, nullable) | rms_norm_eps (float64, 0 to 7, nullable) | rope_theta (float64, 1k to 1,000B, nullable) | sliding_window (int64, 0 to 262k, nullable) | tie_word_embeddings (bool, 2 classes) | torch_dtype (string, 8 distinct values) | transformers_version (string, 207 distinct values) | use_cache (bool, 2 classes) | vocab_size (int64, -1 to 5.03M, nullable) | attention_bias (bool, 2 classes) | attention_dropout (float64, 0 to 0.5, nullable) | head_dim (int64, 2 to 256, nullable) | mlp_bias (bool, 2 classes) | pretraining_tp (int64, 0 to 8, nullable) | rope_scaling (dict)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
licmajster/my_small_gpt2_cswiki | null | null | "2024-11-08T15:39:09Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
jw-hf-test/jw-14B-216 | null | null | "2024-11-08T15:48:57Z" | null | null | 642 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 18,752 | 4,096 | llama | 32 | 48 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.1 | false | 100,288 | false | 0 | 128 | false | 1 | null |
RyanYr/self-reflect_ministral8Bit_mMQA_dpo_iter3 | null | null | "2024-11-08T16:01:57Z" | null | null | 21 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:RyanYr/self-reflect_ministral8Bit_mMQA_dpo_iter2",
"base_model:finetune:RyanYr/self-reflect_ministral8Bit_mMQA_dpo_iter2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
jinyuan22/promogen2-small | null | null | "2024-11-08T16:33:11Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.38.2 | true | 10 | null | null | null | null | null | null |
yiksiu/EuroLLM-1.7B-DPO-v2 | null | null | "2024-11-08T16:33:12Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 4 | silu | 2,048 | 0.02 | 5,632 | 4,096 | llama | 16 | 24 | 8 | 0.00001 | 10,000 | null | false | float32 | 4.46.2 | false | 128,000 | false | 0 | 128 | false | 1 | null |
jinyuan22/promogen2-base | null | null | "2024-11-08T16:39:59Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.38.2 | true | 10 | null | null | null | null | null | null |
devngho/llama-tiny-random | null | null | "2024-11-08T16:54:36Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 768 | 0.02 | 3,072 | 2,048 | llama | 12 | 12 | 12 | 0.00001 | 10,000 | null | false | float32 | 4.45.0.dev0 | true | 14,474 | false | 0 | null | false | 1 | null |
deepnet/SN29-C00-llama-HK1-2 | null | null | "2024-11-08T16:54:36Z" | null | null | 82 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
Youlln/ECE-PRYMMAL-YL-1B-SLERP-V1 | null | null | "2024-11-08T16:58:14Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 131,072 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
xxhe/esci-sft-mistral-7b-instruct | null | null | "2024-11-08T17:02:42Z" | null | null | 81 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.1 | true | 32,768 | null | 0 | 128 | null | null | null |
Youlln/ECE-PRYMMAL-YL-1B-SLERP-V2 | null | null | "2024-11-08T17:03:21Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 1,536 | 0.02 | 8,960 | 131,072 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
lalainy/ECE-PRYMMAL-YL-6B-SLERP-V1 | null | null | "2024-11-08T17:43:13Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 4 | 0.000001 | 5,000,000 | null | false | float32 | 4.46.2 | false | 64,000 | false | 0 | 128 | false | 1 | null |
MaziyarPanahi/calme-3.2-qwenloi-3b-GGUF | null | null | "2024-11-08T17:49:35Z" | null | null | 76 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.2-qwenloi-3b",
"base_model:quantized:MaziyarPanahi/calme-3.2-qwenloi-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
deepnet/SN29-C00-llama-HK2-2 | null | null | "2024-11-08T17:53:33Z" | null | null | 78 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
jsulz/1731088798-SmolLM2-135Mrandom | null | null | "2024-11-08T18:00:27Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"en",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 0 | 0 | silu | 576 | 0.041667 | 1,536 | 8,192 | llama | 9 | 30 | 3 | 0.00001 | 100,000 | null | true | bfloat16 | 4.40.1 | true | 49,152 | false | 0 | null | null | 1 | null |
GitBag/reasoning_rebel_eta_1e3_lr_3e-7_1731041913 | null | null | "2024-11-08T18:04:41Z" | null | null | 79 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
GitBag/reasoning_rebel_eta_1e4_lr_3e-7_1731046941 | null | null | "2024-11-08T18:12:55Z" | null | null | 63 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
GitBag/reasoning_rebel_eta_1e2_lr_3e-7_1731036923 | null | null | "2024-11-08T18:18:44Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.2 | true | 128,256 | false | 0 | null | false | 1 | null |
deepnet/SN29-C00-llama-HK3New-1 | null | null | "2024-11-08T18:29:11Z" | null | null | 78 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | false | 1 | null |
NathanRoll/gpt2-nyt-1988 | null | null | "2024-11-08T18:46:02Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
motexture/SmolLCoder-1.7B-Instruct | null | null | "2024-11-08T19:08:14Z" | null | null | 2 | null | null | null | null | 0 | null | [
"safetensors",
"llama",
"smoll",
"coding",
"coder",
"model",
"small",
"text-generation",
"conversational",
"en",
"dataset:motexture/cData",
"base_model:HuggingFaceTB/SmolLM2-1.7B-Instruct",
"base_model:finetune:HuggingFaceTB/SmolLM2-1.7B-Instruct",
"license:apache-2.0",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 8,192 | llama | 32 | 24 | 32 | 0.00001 | 130,000 | null | true | float16 | 4.46.1 | true | 49,152 | false | 0 | 64 | false | 1 | null |
MaziyarPanahi/calme-3.3-qwenloi-3b-GGUF | null | null | "2024-11-08T20:16:47Z" | null | null | 79 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.3-qwenloi-3b",
"base_model:quantized:MaziyarPanahi/calme-3.3-qwenloi-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
MaziyarPanahi/calme-3.1-llamaloi-3b-GGUF | null | null | "2024-11-08T20:28:52Z" | null | null | 83 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.1-llamaloi-3b",
"base_model:quantized:MaziyarPanahi/calme-3.1-llamaloi-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
NathanRoll/gpt2-nyt-1989 | null | null | "2024-11-08T20:35:07Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
brgx53/3Blarenegv2-ECE-PRYMMAL-Martial | null | null | "2024-11-08T20:54:19Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Tsunami-th/Tsunami-0.5x-7B-Instruct",
"base_model:merge:Tsunami-th/Tsunami-0.5x-7B-Instruct",
"base_model:fblgit/cybertron-v4-qw7B-MGS",
"base_model:merge:fblgit/cybertron-v4-qw7B-MGS",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
craa/100M_low_100_495 | null | null | "2024-11-08T21:43:25Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | true | 52,000 | null | null | null | null | null | null |
craa/100M_low_100_634 | null | null | "2024-11-08T21:43:25Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | true | 52,000 | null | null | null | null | null | null |
prakcoin/Qwen2.5-Chess | null | null | "2024-11-08T22:10:07Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"conversational",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
brgx53/3Bgeneralv2-ECE-PRYMMAL-Martial | null | null | "2024-11-08T22:17:00Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Tsunami-th/Tsunami-0.5x-7B-Instruct",
"base_model:merge:Tsunami-th/Tsunami-0.5x-7B-Instruct",
"base_model:fblgit/cybertron-v4-qw7B-MGS",
"base_model:merge:fblgit/cybertron-v4-qw7B-MGS",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
async0x42/EVA-Qwen2.5-72B-v0.1-exl2_3.5bpw | null | null | "2024-11-08T22:21:36Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"conversational",
"dataset:anthracite-org/kalo-opus-instruct-22k-no-refusal",
"dataset:Nopm/Opus_WritingStruct",
"dataset:Gryphe/Sonnet3.5-SlimOrcaDedupCleaned",
"dataset:Gryphe/Sonnet3.5-Charcard-Roleplay",
"dataset:Gryphe/ChatGPT-4o-Writing-Prompts",
"dataset:Epiculous/Synthstruct-Gens-v1.1-Filtered-n-Cleaned",
"dataset:Epiculous/SynthRP-Gens-v1.1-Filtered-n-Cleaned",
"dataset:nothingiisreal/Reddit-Dirty-And-WritingPrompts",
"dataset:allura-org/Celeste-1.x-data-mixture",
"dataset:cognitivecomputations/dolphin-2.9.3",
"base_model:Qwen/Qwen2.5-72B",
"base_model:quantized:Qwen/Qwen2.5-72B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | null | 151,643 | silu | 8,192 | 0.02 | 29,568 | 131,072 | qwen2 | 64 | 80 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.1 | false | 152,064 | null | 0 | null | null | null | null |
yjwon/mpg27_mistral7bv3_sft | null | null | "2024-11-08T22:34:31Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
NathanRoll/gpt2-nyt-1990 | null | null | "2024-11-08T22:35:21Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
yjwon/mpg27_gemma9b_sft | null | null | "2024-11-08T22:36:31Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.47.0.dev0 | true | 256,000 | false | 0 | 256 | null | null | null |
craa/100M_low_100_6910 | null | null | "2024-11-08T22:40:24Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | true | 52,000 | null | null | null | null | null | null |
craa/100M_low_100_1208 | null | null | "2024-11-08T22:40:24Z" | null | null | 14 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | true | 52,000 | null | null | null | null | null | null |
craa/100M_low_100_8397 | null | null | "2024-11-08T22:40:24Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | true | 52,000 | null | null | null | null | null | null |
yiran-wang3/ds_coder_reflct_adamw_iter2 | null | null | "2024-11-08T22:47:18Z" | null | null | 80 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"alignment-handbook",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"dataset:self-generate/ds_coder_reflct_sppo_hard_new_cn_mining_oj_iter1-binarized-reflection-scored",
"base_model:yiran-wang3/ds_coder_reflct_adamw_iter1",
"base_model:finetune:yiran-wang3/ds_coder_reflct_adamw_iter1",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,000 | 100,015 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 30 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.0 | true | 102,400 | false | 0 | 128 | false | 1 | null |
async0x42/Qwen2.5.1-Coder-7B-Instruct-exl2_5.0bpw | null | null | "2024-11-08T23:29:04Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-7B",
"base_model:quantized:Qwen/Qwen2.5-Coder-7B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"5-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.44.0 | true | 152,064 | null | 0 | null | null | null | null |
MichaelHu03/CS6220-GPT | null | null | "2024-11-09T00:06:48Z" | null | null | 11 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openai-gpt",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"OpenAIGPTLMHeadModel"
] | 40,478 | 40,479 | null | null | 0.02 | null | null | openai-gpt | null | null | null | null | null | null | null | bfloat16 | 4.46.2 | null | 40,480 | null | null | null | null | null | null |
ehottl/Llama-3-Open-Ko-8B-Instruct-teddynote | null | null | "2024-11-09T00:14:07Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:beomi/Llama-3-Open-Ko-8B-Instruct-preview",
"base_model:finetune:beomi/Llama-3-Open-Ko-8B-Instruct-preview",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
lalainy/ECE-PRYMMAL-YL-6B-SLERP-V2 | null | null | "2024-11-09T00:22:55Z" | null | null | 12 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 4 | 0.000001 | 5,000,000 | null | false | float32 | 4.46.2 | false | 64,000 | false | 0 | 128 | false | 1 | null |
RyanYr/self-reflect_ministral8Bit_math_dpo_iter1 | null | null | "2024-11-09T00:37:12Z" | null | null | 52 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:mistralai/Ministral-8B-Instruct-2410",
"base_model:finetune:mistralai/Ministral-8B-Instruct-2410",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
NathanRoll/gpt2-nyt-1991 | null | null | "2024-11-09T01:08:24Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
Sakalti/petalon1 | null | null | "2024-11-09T01:25:07Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2403.19522",
"base_model:Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v1",
"base_model:merge:Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v1",
"base_model:Qwen/Qwen2.5-1.5B-Instruct",
"base_model:merge:Qwen/Qwen2.5-1.5B-Instruct",
"base_model:Qwen/Qwen2.5-Coder-1.5B-Instruct",
"base_model:merge:Qwen/Qwen2.5-Coder-1.5B-Instruct",
"base_model:Qwen/Qwen2.5-Math-1.5B-Instruct",
"base_model:merge:Qwen/Qwen2.5-Math-1.5B-Instruct",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.45.2 | true | 151,936 | null | 0 | null | null | null | null |
yiran-wang3/ds_coder_reflct_adamw_iter3 | null | null | "2024-11-09T01:35:48Z" | null | null | 78 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"alignment-handbook",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"dataset:self-generate/ds_coder_reflct_sppo_hard_new_cn_mining_oj_iter2-binarized-reflection-scored",
"base_model:yiran-wang3/ds_coder_reflct_adamw_iter2",
"base_model:finetune:yiran-wang3/ds_coder_reflct_adamw_iter2",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,000 | 100,015 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 30 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.0 | true | 102,400 | false | 0 | 128 | false | 1 | null |
overfit-brothers/KRX-ver28 | null | null | "2024-11-09T02:09:39Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"krx",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 152,064 | null | 0 | null | null | null | null |
NathanRoll/gpt2-nyt-1992 | null | null | "2024-11-09T02:37:16Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
dbuddenbaum/Mistral-7B-Fine_Tunned_Model | null | null | "2024-11-09T02:52:59Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 32,000 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | bfloat16 | 4.46.2 | false | 32,032 | null | 0 | 128 | null | null | null |
jw-hf-test/jw-14B-217 | null | null | "2024-11-09T03:31:41Z" | null | null | 568 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 18,752 | 4,096 | llama | 32 | 48 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.1 | true | 100,288 | false | 0 | null | false | 1 | null |
Hjgugugjhuhjggg/mergekit-passthrough-vnrjyuc | null | null | "2024-11-09T03:51:56Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"mergekit",
"merge",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | bfloat16 | 4.46.1 | true | 50,257 | null | null | null | null | null | null |
lilmeaty/gpt2 | null | null | "2024-11-09T03:53:51Z" | null | null | 10 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"mergekit",
"merge",
"base_model:openai-community/gpt2-large",
"base_model:finetune:openai-community/gpt2-large",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | bfloat16 | 4.46.1 | true | 50,257 | null | null | null | null | null | null |
Clark88/TinyLlama-QNA-Colab20epoch | null | null | "2024-11-09T04:01:13Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float16 | 4.46.2 | true | 32,000 | false | 0 | 64 | false | 1 | null |
mjschock/TinyLlama-1.1B-Chat-v1.0 | null | null | "2024-11-09T04:13:52Z" | null | null | 446 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float32 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
yiran-wang3/ds_coder_reflct_adamw_iter4 | null | null | "2024-11-09T04:23:41Z" | null | null | 77 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"alignment-handbook",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"dataset:self-generate/ds_coder_reflct_sppo_hard_new_cn_mining_oj_iter3-binarized-reflection-scored",
"base_model:yiran-wang3/ds_coder_reflct_adamw_iter3",
"base_model:finetune:yiran-wang3/ds_coder_reflct_adamw_iter3",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,000 | 100,015 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 30 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.45.0 | true | 102,400 | false | 0 | 128 | false | 1 | null |
Statuo/MiS-Firefly-v0.2-22b-EXL2-8bpw | null | null | "2024-11-09T04:26:39Z" | null | null | 11 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"not-for-all-audiences",
"axolotl",
"qlora",
"conversational",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 32,768 | mistral | 48 | 56 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | false | 32,768 | null | 0 | 128 | null | null | null |
Statuo/MiS-Firefly-v0.2-22b-EXL2-6bpw | null | null | "2024-11-09T04:26:48Z" | null | null | 16 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"not-for-all-audiences",
"axolotl",
"qlora",
"conversational",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"6-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 6,144 | 0.02 | 16,384 | 32,768 | mistral | 48 | 56 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.44.2 | false | 32,768 | null | 0 | 128 | null | null | null |
sergeyborss/mistral_fine_tuning_first | null | null | "2024-11-09T04:50:39Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float32 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
navneetthakor/LLFG-2 | null | null | "2024-11-09T05:15:14Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"GemmaForCausalLM"
] | 2 | 1 | gelu | 2,048 | 0.02 | 16,384 | 8,192 | gemma | 8 | 18 | 1 | 0.000001 | 10,000 | null | null | float16 | 4.46.2 | true | 256,000 | false | 0 | 256 | null | null | null |
dasiy/M1_LLAMA2 | null | null | "2024-11-09T05:24:21Z" | null | null | 62 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
NathanRoll/gpt2-nyt-1993 | null | null | "2024-11-09T05:46:11Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
RohitAhire/gita-text-generation-gpt2 | null | null | "2024-11-09T06:10:17Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
smortlly/mistral_fine_tuning_second | null | null | "2024-11-09T06:51:35Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float32 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
NathanRoll/gpt2-nyt-1994 | null | null | "2024-11-09T06:51:57Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
EtherealWhisper/SmolLM2-1.7B-ShippingEmail-FineTuned | null | null | "2024-11-09T07:04:47Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 8,192 | llama | 32 | 24 | 32 | 0.00001 | 130,000 | null | true | float32 | 4.44.2 | true | 49,152 | false | 0 | null | false | 1 | null |
NathanRoll/gpt2-nyt-1995 | null | null | "2024-11-09T07:59:59Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
Dbmaxwell/finansal_analiz | null | null | "2024-11-09T08:20:46Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.44.2 | true | 50,257 | null | null | null | null | null | null |
9sub/llama3_10epoch | null | null | "2024-11-09T09:04:12Z" | null | null | 72 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,009 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float32 | 4.40.1 | false | 128,256 | false | 0 | null | false | 1 | null |
hamaadayubkhan/Psychologist | null | null | "2024-11-09T09:22:09Z" | null | null | 54 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"en",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
grpathak22/bloom-marathi-fo | null | null | "2024-11-09T09:28:54Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"bloom",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"BloomForCausalLM"
] | 1 | 2 | null | 1,024 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 250,880 | null | 0 | null | null | 1 | null |
Sayankotor/LoraLlama | null | null | "2024-11-09T10:00:37Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.41.2 | false | 32,000 | false | 0 | null | false | 1 | null |
devshaheen/Llama-2-7b-chat-finetune | null | null | "2024-11-09T10:19:57Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"instruction-following",
"fine-tuned",
"llama2",
"causal-language-model",
"QLoRa",
"4-bit-quantization",
"low-memory",
"training-optimized",
"en",
"dataset:mlabonne/guanaco-llama2-1k",
"base_model:NousResearch/Llama-2-7b-chat-hf",
"base_model:finetune:NousResearch/Llama-2-7b-chat-hf",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
lalainy/ECE-PRYMMAL-YL-0.5B-SLERP-BIS-V1 | null | null | "2024-11-09T11:07:23Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
ihughes15234/phi35_kp_dpo1epoch_1e6lr | null | null | "2024-11-09T11:18:33Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:ihughes15234/phi_3_5_mini_kp_12k_cfr_sft",
"base_model:finetune:ihughes15234/phi_3_5_mini_kp_12k_cfr_sft",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | bfloat16 | 4.46.2 | true | 32,064 | false | 0 | 96 | false | 1 | null |
lalainy/ECE-PRYMMAL-YL-0.5B-SLERP-BIS-V2 | null | null | "2024-11-09T11:21:39Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
MatteoVan/layoutlmv3-test-decoder-1024 | null | null | "2024-11-09T11:42:51Z" | null | null | 9 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"bart",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"BartForCausalLM"
] | 0 | 2 | null | null | null | null | 1,024 | bart | null | 6 | null | null | null | null | null | float32 | 4.45.1 | true | 50,277 | null | 0.1 | null | null | null | null |
cmeraki/hf-tts-speakermashup | null | null | "2024-11-09T12:03:07Z" | null | null | 24 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.2 | true | 59,968 | null | null | null | null | null | null |
rkingzhong/qwen2.5-3b-classical-chinese-trans | null | null | "2024-11-09T12:29:30Z" | null | null | 4 | null | null | null | null | 0 | null | [
"safetensors",
"qwen2",
"classical",
"chinese",
"qwen2.5",
"text-generation",
"conversational",
"zh",
"license:apache-2.0",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.46.1 | true | 151,936 | null | 0 | null | null | null | null |
c01zaut/gemma-2-9b-rk3588-1.1.2 | null | null | "2024-11-09T12:35:07Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"arxiv:2009.03300",
"arxiv:1905.07830",
"arxiv:1911.11641",
"arxiv:1904.09728",
"arxiv:1905.10044",
"arxiv:1907.10641",
"arxiv:1811.00937",
"arxiv:1809.02789",
"arxiv:1911.01547",
"arxiv:1705.03551",
"arxiv:2107.03374",
"arxiv:2108.07732",
"arxiv:2110.14168",
"arxiv:2009.11462",
"arxiv:2101.11718",
"arxiv:2110.08193",
"arxiv:1804.09301",
"arxiv:2109.07958",
"arxiv:1804.06876",
"arxiv:2103.03874",
"arxiv:2304.06364",
"arxiv:2206.04615",
"arxiv:2203.09509",
"license:gemma",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | float32 | 4.42.0.dev0 | true | 256,000 | false | 0 | 256 | null | null | null |
Qwen/Qwen2.5-Coder-0.5B-Instruct-GPTQ-Int8 | null | null | "2024-11-09T12:42:57Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | float16 | 4.39.3 | true | 151,936 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-0.5B-Instruct-GPTQ-Int4 | null | null | "2024-11-09T12:43:15Z" | null | null | 7 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | float16 | 4.39.3 | true | 151,936 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-3B-Instruct-GPTQ-Int8 | null | null | "2024-11-09T12:43:25Z" | null | null | 3 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | float16 | 4.39.3 | true | 151,936 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-3B-Instruct-GPTQ-Int4 | null | null | "2024-11-09T12:43:36Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | float16 | 4.39.3 | true | 151,936 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-14B-Instruct-GPTQ-Int8 | null | null | "2024-11-09T12:43:53Z" | null | null | 27 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | float16 | 4.39.3 | true | 152,064 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-14B-Instruct-GPTQ-Int4 | null | null | "2024-11-09T12:44:18Z" | null | null | 40 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | float16 | 4.39.3 | true | 152,064 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-3B-Instruct-AWQ | null | null | "2024-11-09T12:46:06Z" | null | null | 25 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-3B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-3B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | float16 | 4.41.1 | true | 151,936 | null | 0 | null | null | null | null |
Qwen/Qwen2.5-Coder-0.5B-Instruct-AWQ | null | null | "2024-11-09T12:46:25Z" | null | null | 23 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-0.5B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | 32,768 | true | float16 | 4.41.1 | true | 151,936 | null | 0 | null | null | null | null |
OpenDFM/ChemDFM-v1.5-8B | null | null | "2024-11-09T13:09:07Z" | null | null | 2 | null | null | null | null | 0 | null | [
"safetensors",
"llama",
"chemistry",
"text-generation",
"conversational",
"en",
"zh",
"arxiv:2401.14818",
"license:agpl-3.0",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.40.0 | true | 128,264 | false | 0 | null | null | 1 | null |
cmeraki/hf-audio-continue | null | null | "2024-11-09T13:27:26Z" | null | null | 100 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.2 | true | 59,968 | null | null | null | null | null | null |
xguman/my_small_gpt2_zlatyfond | null | null | "2024-11-09T14:11:21Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
MaziyarPanahi/calme-3.3-llamaloi-3b-GGUF | null | null | "2024-11-09T14:23:41Z" | null | null | 85 | null | null | null | null | 0 | null | [
"gguf",
"mistral",
"quantized",
"2-bit",
"3-bit",
"4-bit",
"5-bit",
"6-bit",
"8-bit",
"GGUF",
"text-generation",
"base_model:MaziyarPanahi/calme-3.3-llamaloi-3b",
"base_model:quantized:MaziyarPanahi/calme-3.3-llamaloi-3b",
"region:us",
"conversational"
] | text-generation | null | null | 0 | null | null | null | null | null | null | null | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
betteib/gp2_basemodel_v11 | null | null | "2024-11-09T14:41:05Z" | null | null | 51 | null | null | null | null | 0 | transformers | [
"transformers",
"tensorboard",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.45.1 | true | 54,272 | null | null | null | null | null | null |
waldie/TQ2.5-14B-Sugarquill-v1-8bpw-h8-exl2 | null | null | "2024-11-09T15:38:20Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"conversational",
"en",
"dataset:Mielikki/Erebus-87k",
"dataset:allura-org/r_shortstories_24k",
"base_model:allura-org/TQ2.5-14B-Sugarquill-v1",
"base_model:quantized:allura-org/TQ2.5-14B-Sugarquill-v1",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | null | 151,645 | silu | 5,120 | 0.02 | 13,824 | 131,072 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.45.1 | false | 152,064 | null | 0 | null | null | null | null |
cgus/Qwen2.5-14B-Instruct-abliterated-exl2 | null | null | "2024-11-09T16:01:19Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"qwen2",
"text-generation",
"chat",
"abliterated",
"uncensored",
"conversational",
"en",
"base_model:huihui-ai/Qwen2.5-14B-Instruct-abliterated",
"base_model:quantized:huihui-ai/Qwen2.5-14B-Instruct-abliterated",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"exl2",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
kxdw2580/test-qwen2.5-paimon-part1 | null | null | "2024-11-09T16:10:23Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"llama-factory",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | bfloat16 | 4.45.2 | true | 151,936 | null | 0 | null | null | null | null |
RyanYr/self-reflect_ministral8Bit_math_dpo_iter2 | null | null | "2024-11-09T16:58:18Z" | null | null | 19 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"generated_from_trainer",
"trl",
"dpo",
"conversational",
"arxiv:2305.18290",
"base_model:RyanYr/self-reflect_ministral8Bit_math_dpo_iter1",
"base_model:finetune:RyanYr/self-reflect_ministral8Bit_math_dpo_iter1",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 12,288 | 32,768 | mistral | 32 | 36 | 8 | 0.00001 | 100,000,000 | 32,768 | false | bfloat16 | 4.45.2 | false | 131,073 | null | 0 | 128 | null | null | null |
KartikGPT/Llama-2-7b-chat-finetune | null | null | "2024-11-09T17:07:06Z" | null | null | 4 | null | null | null | null | 0 | null | [
"pytorch",
"llama",
"text-generation",
"en",
"dataset:timdettmers/openassistant-guanaco",
"base_model:NousResearch/Llama-2-7b-chat-hf",
"base_model:finetune:NousResearch/Llama-2-7b-chat-hf",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
grpathak22/bloom-marathi-storytext | null | null | "2024-11-09T17:36:49Z" | null | null | 5 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"bloom",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"BloomForCausalLM"
] | 1 | 2 | null | 1,024 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 250,880 | null | 0 | null | null | 1 | null |
AJMALm/Gemma-2-9b-it-chat-doctor | null | null | "2024-11-09T17:48:43Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 256,000 | 256,001 | gelu_pytorch_tanh | 2,304 | 0.02 | 9,216 | 8,192 | gemma2 | 8 | 26 | 4 | 0.000001 | 10,000 | 4,096 | null | float16 | 4.46.2 | true | 256,002 | false | 0 | 256 | null | null | null |
TomasHalmazna/my_small_gpt2_trains | null | null | "2024-11-09T17:54:43Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
Delse/SadeDitMoi | null | null | "2024-11-09T17:56:17Z" | null | null | 29 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"generated_from_trainer",
"base_model:openai-community/gpt2",
"base_model:finetune:openai-community/gpt2",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |