Columns: `model` (string), `base_model` (string), `revision` (string), `private` (bool), `precision` (string), `weight_type` (string), `status` (string), `submitted_time` (timestamp[us]), `model_type` (string), `likes` (int64), `params` (float64, billions of parameters), `license` (string), `architecture` (string), `sender` (string).

model | base_model | revision | private | precision | weight_type | status | submitted_time | model_type | likes | params | license | architecture | sender |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
01-ai/Yi-1.5-9B | | main | false | bfloat16 | Original | FINISHED | 2024-09-21T10:47:19 | 🟢 : pretrained | 44 | 7.25 | apache-2.0 | LlamaForCausalLM | mariagrandury |
BSC-LT/salamandra-2b | | 08316c1f2051a61dadb556a63f7a6796a3d1dbed | false | bfloat16 | Original | FINISHED | 2024-10-02T12:03:38 | 🟢 : pretrained | 5 | 2.253 | apache-2.0 | LlamaForCausalLM | mariagrandury |
BSC-LT/salamandra-7b | | 968d2c40c21134ba201122737be98556a7da6727 | false | bfloat16 | Original | FINISHED | 2024-10-03T15:19:26 | 🟢 : pretrained | 8 | 7.768 | apache-2.0 | LlamaForCausalLM | mariagrandury |
CohereForAI/aya-expanse-8b | | b9848575c8731981dfcf2e1f3bfbcb917a2e585d | false | float16 | Original | PENDING | 2024-10-24T17:12:46 | 🟢 : pretrained | 44 | 8.028 | cc-by-nc-4.0 | CohereForCausalLM | mariagrandury |
Danielbrdz/Barcenas-27b | | b29599acbef4cde587ea49725d7d825063bad077 | false | float32 | Original | PENDING | 2024-10-02T17:48:43 | 🔶 : fine-tuned | 0 | 27.227 | gemma | Gemma2ForCausalLM | Danielbrdz |
HiTZ/latxa-13b-v1.2 | | c727529f9e3f0d5fc2d5082ea3b229f5f96684b7 | false | bfloat16 | Original | PENDING | 2024-10-24T13:50:13 | 🟢 : pretrained | 1 | 13 | llama2 | LlamaForCausalLM | Iker |
HiTZ/latxa-70b-v1.2 | | 4a6a823da0f796248c3eebbea68430c147b25b62 | false | bfloat16 | Original | PENDING | 2024-10-24T13:50:24 | 🟢 : pretrained | 0 | 70 | llama2 | LlamaForCausalLM | Iker |
HiTZ/latxa-7b-v1.2 | | 5a798bd370d604bab91645c12428b2fbf35525d2 | false | bfloat16 | Original | PENDING | 2024-10-24T13:50:31 | 🟢 : pretrained | 1 | 7 | llama2 | LlamaForCausalLM | Iker |
Iker/Llama-3-Instruct-Neurona-8b-v2 | | 92b369f60dae263d14f496e7215ee89c80473660 | false | bfloat16 | Original | PENDING | 2024-09-26T08:53:03 | ⭕ : instruction-tuned | 2 | 8.03 | llama3 | LlamaForCausalLM | Iker |
LenguajeNaturalAI/leniachat-qwen2-1.5B-v0 | | 031a2efebb3cc1150e46f42ba0bea9fa7b855436 | false | float32 | Original | PENDING | 2024-11-27T06:30:13 | ⭕ : instruction-tuned | 19 | 1.543 | apache-2.0 | Qwen2ForCausalLM | avacaondata |
Qwen/Qwen2-7B | | 453ed1575b739b5b03ce3758b23befdb0967f40e | false | bfloat16 | Original | PENDING | 2024-10-01T06:38:32 | 🟢 : pretrained | 130 | 7.616 | apache-2.0 | Qwen2ForCausalLM | Iker |
Qwen/Qwen2.5-7B | | d149729398750b98c0af14eb82c78cfe92750796 | false | bfloat16 | Original | PENDING | 2024-11-25T13:02:51 | 🟢 : pretrained | 71 | 7.616 | apache-2.0 | Qwen2ForCausalLM | ChenyangLyu |
bertin-project/Gromenauer-7B-Instruct | | 4cee56ca72ee98ec67f4532010851c14d6b0d4e9 | false | float32 | Original | PENDING | 2024-09-25T21:55:47 | ⭕ : instruction-tuned | 2 | 7.242 | apache-2.0 | MistralForCausalLM | alvp |
bertin-project/Gromenauer-7B | | aaff2b37b64b0cdf4ed5694ea5ee483b898a6c77 | false | float32 | Original | FINISHED | 2024-09-25T21:55:26 | 🟢 : pretrained | 2 | 7.242 | apache-2.0 | MistralForCausalLM | alvp |
bertin-project/bertin-gpt-j-6B | | main | false | float32 | Original | FINISHED | 2024-09-21T08:47:19 | 🟢 : pretrained | 17 | 6 | apache-2.0 | GPTJForCausalLM | mariagrandury |
demo-leaderboard/gpt2-demo | | ac3299b02780836378b9e1e68c6eead546e89f90 | false | float32 | Original | FINISHED | 2024-09-20T21:47:19 | 🟢 : pretrained | 0 | 7 | custom | | mariagrandury |
google/gemma2-2b | | main | false | float32 | Original | FINISHED | 2024-09-20T21:47:19 | 🟢 : pretrained | 342 | 2.61 | gemma | Gemma2ForCausalLM | mariagrandury |
gplsi/Aitana-6.3B | | main | false | bfloat16 | Original | FINISHED | 2024-09-21T05:19:20 | 🟢 : pretrained | 0 | 6.25 | apache-2.0 | BloomForCausalLM | mariagrandury |
ibm-granite/granite-3.0-8b-base | | 23357b69523bd98523496a5aba1f48bdea04a137 | false | float32 | Original | PENDING | 2024-12-03T11:36:41 | 🟢 : pretrained | 21 | 8.171 | apache-2.0 | GraniteForCausalLM | asier-gutierrez |
ibm-granite/granite-3.0-8b-instruct | | 8fe1e202a17f7763bd0af471253e00cc846d1c05 | false | float32 | Original | PENDING | 2024-12-03T11:36:17 | ⭕ : instruction-tuned | 180 | 8.171 | apache-2.0 | GraniteForCausalLM | asier-gutierrez |
marianbasti/Llama-2-13b-fp16-alpaca-spanish | | 1f96ecd9c3f05e50c7d865f7718dc80b7c7369d2 | false | float32 | Original | FAILED | 2024-09-25T16:08:28 | 🔶 : fine-tuned | 1 | 13 | llama2 | LlamaForCausalLM | marianbasti |
meta-llama/Llama-3.2-1B | | 221e3535e1ac4840bdf061a12b634139c84e144c | false | bfloat16 | Original | FINISHED | 2024-10-10T14:36:19 | 🟢 : pretrained | 532 | 1.24 | llama3.2 | LlamaForCausalLM | mariagrandury |
meta-llama/Llama-3.2-3B | | 5cc0ffe09ee49f7be6ca7c794ee6bd7245e84e60 | false | bfloat16 | Original | FINISHED | 2024-10-10T14:36:57 | 🟢 : pretrained | 213 | 3.21 | llama3.2 | LlamaForCausalLM | mariagrandury |
meta-llama/Meta-Llama-3.1-8B | | main | false | bfloat16 | Original | FINISHED | 2024-09-21T09:47:19 | 🟢 : pretrained | 877 | 8.03 | llama3.1 | LlamaForCausalLM | mariagrandury |
microsoft/Phi-3.5-mini-instruct | | af0dfb8029e8a74545d0736d30cb6b58d2f0f3f0 | false | float32 | Original | PENDING | 2024-12-03T11:35:21 | ⭕ : instruction-tuned | 664 | 3.821 | mit | Phi3ForCausalLM | asier-gutierrez |
microsoft/phi-1_5 | | main | false | float16 | Original | FINISHED | 2024-09-21T12:47:19 | 🟢 : pretrained | 1,310 | 1.42 | mit | PhiForCausalLM | mariagrandury |
mistralai/Mistral-7B-v0.3 | | main | false | bfloat16 | Original | FINISHED | 2024-09-21T10:47:19 | 🟢 : pretrained | 363 | 7.25 | apache-2.0 | MistralForCausalLM | mariagrandury |
occiglot/occiglot-7b-es-en | | main | false | float32 | Original | FINISHED | 2024-09-21T03:47:19 | 🟢 : pretrained | 4 | 7.24 | apache-2.0 | MistralForCausalLM | mariagrandury |
orai-nlp/Llama-eus-8B | | 75b5645d222047b517a7a9190922ea1b5382c71f | false | bfloat16 | Original | PENDING | 2024-10-01T08:20:05 | 🟢 : pretrained | 3 | 8.03 | null | LlamaForCausalLM | andercorral |
projecte-aina/FLOR-6.3B | | main | false | float16 | Original | FINISHED | 2024-09-21T00:47:19 | 🟢 : pretrained | 29 | 6.25 | apache-2.0 | BloomForCausalLM | mariagrandury |
projecte-aina/aguila-7b | | main | false | float16 | Original | FINISHED | 2024-09-25T14:39:00 | 🟢 : pretrained | 54 | 6.85 | apache-2.0 | RWForCausalLM | mariagrandury |
proxectonos/Carballo-bloom-1.3B | | main | false | float16 | Original | FINISHED | 2024-09-20T22:47:19 | 🟢 : pretrained | 5 | 1.31 | mit | BloomForCausalLM | mariagrandury |
proxectonos/Llama-3.1-Carballo | Llama-3.1-8B | 78957ff438e4c527c52b6a3cc689510502db3a4f | false | bfloat16 | Original | FINISHED | 2024-10-19T21:42:09 | 🟢 : pretrained | 0 | 8.03 | llama3.1 | LlamaForCausalLM | gamallo |
sandbox-ai/Llama-3.1-Tango-70b | nvidia/Llama-3.1-Nemotron-70B-Instruct-HF | 732364fa06f0b56e9648ad9265e86f479456d161 | false | bfloat16 | Adapter | PENDING | 2024-12-06T20:13:04 | ⭕ : instruction-tuned | 6 | 70 | llama3.1 | ? | tatakof |
sandbox-ai/Llama-3.1-Tango-8b-f16 | | 6be7482100037da375ba586234c59c5ccaad7ec1 | false | float16 | Original | PENDING | 2024-12-08T04:05:41 | 🔶 : fine-tuned | 0 | 8.03 | llama3.1 | LlamaForCausalLM | tatakof |
tiiuae/falcon-7b | | main | false | bfloat16 | Original | FINISHED | 2024-09-21T13:47:19 | 🟢 : pretrained | 1,070 | 7 | apache-2.0 | FalconForCausalLM | mariagrandury |
utter-project/EuroLLM-1.7B-Instruct | | d68ee98cc97c3d5bda6d96d50ba7ce9f8d6ff631 | false | bfloat16 | Original | PENDING | 2024-09-26T10:40:51 | ⭕ : instruction-tuned | 17 | 1.657 | apache-2.0 | LlamaForCausalLM | Iker |
utter-project/EuroLLM-1.7B | | 04f6500b744c641e397044e167026a1cb9385eff | false | bfloat16 | Original | PENDING | 2024-10-24T13:31:37 | 🟢 : pretrained | 34 | 1.7 | apache-2.0 | LlamaForCausalLM | Iker |
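The same requests table can be inspected programmatically. The snippet below is a minimal sketch using the `datasets` library to load it and list the submissions still marked `PENDING`; the repository ID `your-org/leaderboard-requests` is a placeholder for this dataset's actual path on the Hub, and the default `train` split is assumed.

```python
# Minimal sketch; assumptions: placeholder repo ID, default "train" split.
from datasets import load_dataset

# Replace with the actual Hub path of this requests dataset.
requests = load_dataset("your-org/leaderboard-requests", split="train")

# Keep only submissions that have not been evaluated yet.
pending = requests.filter(lambda row: row["status"] == "PENDING")

print(f"{len(pending)} of {len(requests)} submissions are pending")
for row in pending:
    print(f'{row["model"]:45s} {row["precision"]:9s} {row["params"]:7.3f}B  {row["sender"]}')
```

Filtering on `status` mirrors the `PENDING`/`FINISHED`/`FAILED` values visible in the table above.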
- Downloads last month: 1,579
- Size of downloaded dataset files: 14.9 kB
- Size of the auto-converted Parquet files: 151 kB
- Number of rows: 38
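The 151 kB auto-converted Parquet export can also be read directly with pandas, without pulling in `datasets`. This is a sketch only: the repository ID is again a placeholder, and the shard path assumes the Hub's usual `refs/convert/parquet` layout with a single `default/train` file.

```python
# Minimal sketch; assumptions: placeholder repo ID, single default/train shard.
# Requires pandas plus huggingface_hub, which registers the hf:// filesystem.
import pandas as pd

url = (
    "hf://datasets/your-org/leaderboard-requests"
    "@refs/convert/parquet/default/train/0000.parquet"
)
df = pd.read_parquet(url)

# Quick look at evaluation status per model type.
print(df.groupby(["model_type", "status"]).size())
```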