slug
stringlengths 15
15
| content
listlengths 1
129
| rawContent
stringlengths 1
2k
| author
dict | attachments
listlengths 0
49
| mentions
listlengths 0
49
| reactions
listlengths 0
12
| publishedAt
stringlengths 24
24
| updatedAt
stringlengths 24
24
| commentators
listlengths 0
52
| url
stringlengths 25
46
| totalUniqueImpressions
int64 1
42.1k
⌀ | numComments
int64 0
621
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
172855811832549 | [
{
"type": "text",
"value": "Introducing illusion Diffusion Video",
"raw": "Introducing illusion Diffusion Video",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/spaces/KingNish/IllusionDiffusionVideo",
"href": "https://huggingface.co/spaces/KingNish/IllusionDiffusionVideo",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It can Create high quality ULTRA HD illusion video. ",
"raw": "It can Create high quality ULTRA HD illusion video. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "If you find any bugs, please let me know😊",
"raw": "If you find any bugs, please let me know😊",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Introducing illusion Diffusion Video
https://huggingface.co/spaces/KingNish/IllusionDiffusionVideo
It can Create high quality ULTRA HD illusion video.
If you find any bugs, please let me know😊 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"lunarflu",
"SvCy",
"osanseviero"
],
"count": 3
},
{
"reaction": "🚀",
"users": [
"lunarflu",
"SvCy",
"osanseviero"
],
"count": 3
},
{
"reaction": "😎",
"users": [
"SvCy"
],
"count": 1
}
] | 2024-05-08T11:51:38.000Z | 2024-05-10T03:00:44.268Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64b975b696676e40d0ea08aa/fXZFY9a6JxvaQt4iUCFzl.jpeg",
"fullname": "Sourav Chakraborty",
"name": "SvCy",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 7,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
}
] | /posts/KingNish/172855811832549 | 2,710 | 2 |
898938685374933 | [
{
"type": "text",
"value": "🔥 Prometheus 2 was recently released by Kaist AI as an alternative and closely mirroring both human and GPT-4 evaluation, and surpassing the former Prometheus!",
"raw": "🔥 Prometheus 2 was recently released by Kaist AI as an alternative and closely mirroring both human and GPT-4 evaluation, and surpassing the former Prometheus!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/prometheus-eval/prometheus-7b-v2.0",
"href": null,
"resource": {
"type": "model",
"id": "prometheus-eval/prometheus-7b-v2.0",
"discussionNum": null
},
"url": "https://huggingface.co/prometheus-eval/prometheus-7b-v2.0",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/prometheus-eval/prometheus-8x7b-v2.0",
"href": null,
"resource": {
"type": "model",
"id": "prometheus-eval/prometheus-8x7b-v2.0",
"discussionNum": null
},
"url": "https://huggingface.co/prometheus-eval/prometheus-8x7b-v2.0",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌬️Fine-tuned on top of ",
"raw": "🌬️Fine-tuned on top of ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
"href": null,
"resource": {
"type": "model",
"id": "mistralai/Mistral-7B-Instruct-v0.2",
"discussionNum": null
},
"url": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " and ",
"raw": " and ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
"href": null,
"resource": {
"type": "model",
"id": "mistralai/Mixtral-8x7B-Instruct-v0.1",
"discussionNum": null
},
"url": "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🗂️The datasets used for fine-tuning have been publicly released i.e. ",
"raw": "🗂️The datasets used for fine-tuning have been publicly released i.e. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/prometheus-eval/Feedback-Collection",
"href": null,
"resource": {
"type": "dataset",
"id": "prometheus-eval/Feedback-Collection",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/prometheus-eval/Feedback-Collection",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " and ",
"raw": " and ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/prometheus-eval/Preference-Collection",
"href": null,
"resource": {
"type": "dataset",
"id": "prometheus-eval/Preference-Collection",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/prometheus-eval/Preference-Collection",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤝🏻Unified LM evaluator for absolute (a single prompt-completion pair) and relative (two completions for a given prompt) due to model merging",
"raw": "🤝🏻Unified LM evaluator for absolute (a single prompt-completion pair) and relative (two completions for a given prompt) due to model merging",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "❌No longer needs a mandatory reference / golden answer, but can still be provided optionally",
"raw": "❌No longer needs a mandatory reference / golden answer, but can still be provided optionally",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔝Surpasses the former version of Prometheus, and has a high correlation with human, GPT-4, and Claude 3 Opus scores when evaluating LMs",
"raw": "🔝Surpasses the former version of Prometheus, and has a high correlation with human, GPT-4, and Claude 3 Opus scores when evaluating LMs",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📝Apache 2.0 license",
"raw": "📝Apache 2.0 license",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Long-story short, an amazing job from Kaist AI bridging the gap with LLM evaluators other than proprietary and bigger models!",
"raw": "Long-story short, an amazing job from Kaist AI bridging the gap with LLM evaluators other than proprietary and bigger models!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This week at Argilla, we decided to add a new task to use Prometheus 2 as an LLM evaluator using ",
"raw": "This week at Argilla, we decided to add a new task to use Prometheus 2 as an LLM evaluator using ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`distilabel`",
"href": null,
"resource": null,
"url": null,
"code": "distilabel",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ", so we implemented ",
"raw": ", so we implemented ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`PrometheusEval`",
"href": null,
"resource": null,
"url": null,
"code": "PrometheusEval",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "😱 Using ",
"raw": "😱 Using ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`PrometheusEval`",
"href": null,
"resource": null,
"url": null,
"code": "PrometheusEval",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " running their 7B variant with vLLM in a single L40 on top of ",
"raw": " running their 7B variant with vLLM in a single L40 on top of ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/HuggingFaceH4/instruction-dataset",
"href": null,
"resource": {
"type": "dataset",
"id": "HuggingFaceH4/instruction-dataset",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/HuggingFaceH4/instruction-dataset",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ", we got the 327 existing prompt-completion pairs evaluated and pushed to the Hub in less than 2 minutes!",
"raw": ", we got the 327 existing prompt-completion pairs evaluated and pushed to the Hub in less than 2 minutes!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Find the generated dataset and the code at ",
"raw": "Find the generated dataset and the code at ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/distilabel-internal-testing/instruction-dataset-prometheus",
"href": null,
"resource": {
"type": "dataset",
"id": "distilabel-internal-testing/instruction-dataset-prometheus",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/distilabel-internal-testing/instruction-dataset-prometheus",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🔥 Prometheus 2 was recently released by Kaist AI as an alternative and closely mirroring both human and GPT-4 evaluation, and surpassing the former Prometheus!
https://huggingface.co/prometheus-eval/prometheus-7b-v2.0
https://huggingface.co/prometheus-eval/prometheus-8x7b-v2.0
🌬️Fine-tuned on top of https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2 and https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1
🗂️The datasets used for fine-tuning have been publicly released i.e. https://huggingface.co/datasets/prometheus-eval/Feedback-Collection and https://huggingface.co/datasets/prometheus-eval/Preference-Collection
🤝🏻Unified LM evaluator for absolute (a single prompt-completion pair) and relative (two completions for a given prompt) due to model merging
❌No longer needs a mandatory reference / golden answer, but can still be provided optionally
🔝Surpasses the former version of Prometheus, and has a high correlation with human, GPT-4, and Claude 3 Opus scores when evaluating LMs
📝Apache 2.0 license
Long-story short, an amazing job from Kaist AI bridging the gap with LLM evaluators other than proprietary and bigger models!
This week at Argilla, we decided to add a new task to use Prometheus 2 as an LLM evaluator using `distilabel`, so we implemented `PrometheusEval`.
😱 Using `PrometheusEval` running their 7B variant with vLLM in a single L40 on top of https://huggingface.co/datasets/HuggingFaceH4/instruction-dataset, we got the 327 existing prompt-completion pairs evaluated and pushed to the Hub in less than 2 minutes!
Find the generated dataset and the code at https://huggingface.co/datasets/distilabel-internal-testing/instruction-dataset-prometheus | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/60f0608166e5701b80ed3f02/BHso-wSWpR9b8b8CKvodC.jpeg",
"fullname": "Alvaro Bartolome",
"name": "alvarobartt",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 1739,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"matinla",
"lunarflu",
"davanstrien",
"andrewrreed",
"victor",
"NickyNicky",
"QuocKhanh",
"osanseviero",
"dhruva-sarma",
"shamy777"
],
"count": 10
},
{
"reaction": "❤️",
"users": [
"matinla",
"lunarflu",
"andrewrreed",
"QuocKhanh",
"osanseviero",
"anakin87"
],
"count": 6
}
] | 2024-05-08T11:04:53.000Z | 2024-05-08T13:41:00.840Z | [
{
"avatarUrl": "/avatars/5ffbcc32035ad5a5c83e45a4910c0541.svg",
"fullname": "matinlaghaei",
"name": "matinla",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/alvarobartt/898938685374933 | 2,984 | 1 |
813912988998606 | [
{
"type": "text",
"value": "For those who love the Arabic language like me, This is a summary of my different models, datasets and spaces i made the last couple of months:",
"raw": "For those who love the Arabic language like me, This is a summary of my different models, datasets and spaces i made the last couple of months:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1. ",
"raw": "1. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct",
"href": null,
"resource": {
"type": "model",
"id": "MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct",
"discussionNum": null
},
"url": "https://huggingface.co/MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " is a finetuned version of Meta-Llama-3-8B-Instruct using ORPO on ",
"raw": " is a finetuned version of Meta-Llama-3-8B-Instruct using ORPO on ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/2A2I/argilla-dpo-mix-7k-arabic",
"href": null,
"resource": {
"type": "dataset",
"id": "2A2I/argilla-dpo-mix-7k-arabic",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/2A2I/argilla-dpo-mix-7k-arabic",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " and the space to try it is here ",
"raw": " and the space to try it is here ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/MohamedRashad/Arabic-Chatbot-Arena",
"href": null,
"resource": {
"type": "space",
"id": "MohamedRashad/Arabic-Chatbot-Arena",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/MohamedRashad/Arabic-Chatbot-Arena",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "2. ",
"raw": "2. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/MohamedRashad/arabic-small-nougat",
"href": null,
"resource": {
"type": "model",
"id": "MohamedRashad/arabic-small-nougat",
"discussionNum": null
},
"url": "https://huggingface.co/MohamedRashad/arabic-small-nougat",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " is a finetuned version of ",
"raw": " is a finetuned version of ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/facebook/nougat-small",
"href": null,
"resource": {
"type": "model",
"id": "facebook/nougat-small",
"discussionNum": null
},
"url": "https://huggingface.co/facebook/nougat-small",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " on Arabic book pages to be a capable arabic-ocr and its space is also avialable here ",
"raw": " on Arabic book pages to be a capable arabic-ocr and its space is also avialable here ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/spaces/MohamedRashad/Arabic-Small-Nougat",
"href": "https://huggingface.co/spaces/MohamedRashad/Arabic-Small-Nougat",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "3. There is ",
"raw": "3. There is ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/MohamedRashad/Arabic-CivitAi-Images",
"href": null,
"resource": {
"type": "dataset",
"id": "MohamedRashad/Arabic-CivitAi-Images",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/MohamedRashad/Arabic-CivitAi-Images",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " dataset for text-to-image in the Arabic language (Hope someone utilize it to build something great).",
"raw": " dataset for text-to-image in the Arabic language (Hope someone utilize it to build something great).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "4. ",
"raw": "4. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/MohamedRashad/arabic-sts",
"href": null,
"resource": {
"type": "dataset",
"id": "MohamedRashad/arabic-sts",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/MohamedRashad/arabic-sts",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " for those who want to train an Arabic Text Embedder model.",
"raw": " for those who want to train an Arabic Text Embedder model.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "5. Finally, a small arabic dataset about translation from Fusha Arabic to English called ",
"raw": "5. Finally, a small arabic dataset about translation from Fusha Arabic to English called ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/MohamedRashad/rasaif-translations",
"href": null,
"resource": {
"type": "dataset",
"id": "MohamedRashad/rasaif-translations",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/MohamedRashad/rasaif-translations",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " (This dataset is very important in my opinion). ",
"raw": " (This dataset is very important in my opinion). ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | For those who love the Arabic language like me, This is a summary of my different models, datasets and spaces i made the last couple of months:
1. https://huggingface.co/MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct is a finetuned version of Meta-Llama-3-8B-Instruct using ORPO on https://huggingface.co/datasets/2A2I/argilla-dpo-mix-7k-arabic and the space to try it is here https://huggingface.co/spaces/MohamedRashad/Arabic-Chatbot-Arena.
2. https://huggingface.co/MohamedRashad/arabic-small-nougat is a finetuned version of https://huggingface.co/facebook/nougat-small on Arabic book pages to be a capable arabic-ocr and its space is also avialable here https://huggingface.co/spaces/MohamedRashad/Arabic-Small-Nougat.
3. There is https://huggingface.co/datasets/MohamedRashad/Arabic-CivitAi-Images dataset for text-to-image in the Arabic language (Hope someone utilize it to build something great).
4. https://huggingface.co/datasets/MohamedRashad/arabic-sts for those who want to train an Arabic Text Embedder model.
5. Finally, a small arabic dataset about translation from Fusha Arabic to English called https://huggingface.co/datasets/MohamedRashad/rasaif-translations (This dataset is very important in my opinion).
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg",
"fullname": "Mohamed Rashad",
"name": "MohamedRashad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 141,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🤗",
"users": [
"lunarflu",
"qnx",
"alielfilali01"
],
"count": 3
},
{
"reaction": "🔥",
"users": [
"lunarflu",
"abdullah"
],
"count": 2
}
] | 2024-05-08T08:40:00.000Z | 2024-05-10T14:22:14.399Z | [
{
"avatarUrl": "/avatars/38001a4b3a44a63d1fc902e8b5f7291f.svg",
"fullname": "Uthman Bilal",
"name": "Winnougan",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 4,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg",
"fullname": "Mohamed Rashad",
"name": "MohamedRashad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 141,
"isFollowing": false
}
] | /posts/MohamedRashad/813912988998606 | 1,822 | 2 |
163748050928136 | [
{
"type": "text",
"value": "hey guys i am trying to build a High frequency trading bot on ninja trader if you are intrested in making this money printing machine hit me up and lets create a start up!",
"raw": "hey guys i am trying to build a High frequency trading bot on ninja trader if you are intrested in making this money printing machine hit me up and lets create a start up!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | hey guys i am trying to build a High frequency trading bot on ninja trader if you are intrested in making this money printing machine hit me up and lets create a start up! | {
"avatarUrl": "/avatars/c3c3751d209e03e2414103ee528976d7.svg",
"fullname": "Nebyou Zewdu Lema",
"name": "nebazi12",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 8,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"nebazi12",
"topgan",
"teshhh",
"afkfatih",
"PetraLB",
"Hossein77",
"mikeh89",
"KingNish",
"john3huggingface",
"cdnuts"
],
"count": 10
},
{
"reaction": "🤝",
"users": [
"KingNish",
"john3huggingface"
],
"count": 2
},
{
"reaction": "🤗",
"users": [
"Lewdiculous"
],
"count": 1
},
{
"reaction": "😔",
"users": [
"nlpguy"
],
"count": 1
}
] | 2024-05-08T04:09:48.000Z | 2024-05-10T22:01:29.141Z | [
{
"avatarUrl": "/avatars/a6ebdc8cb493120a988602cba65a84be.svg",
"fullname": "johnboris",
"name": "john3huggingface",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "/avatars/235d54b867ebc959639a8a7fd004b4b5.svg",
"fullname": "scapking",
"name": "scapking",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/nebazi12/163748050928136 | 1,972 | 2 |
674895718910125 | [
{
"type": "text",
"value": "Working on a concept ",
"raw": "Working on a concept ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`GPT-2 (small)`",
"href": null,
"resource": null,
"url": null,
"code": "GPT-2 (small)",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " that uses ",
"raw": " that uses ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`KANs`",
"href": null,
"resource": null,
"url": null,
"code": "KANs",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " instead of ",
"raw": " instead of ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`MLPs`",
"href": null,
"resource": null,
"url": null,
"code": "MLPs",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The ckpt and training code will be soon on the hub.",
"raw": "The ckpt and training code will be soon on the hub.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Working on a concept `GPT-2 (small)` that uses `KANs` instead of `MLPs`.
The ckpt and training code will be soon on the hub. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5e4318d616b09a31220980d6/24rMJ_vPh3gW9ZEmj64xr.png",
"fullname": "Manuel Romero",
"name": "mrm8488",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 2200,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🚀",
"users": [
"hanzla",
"mrm8488",
"clem",
"ucalyptus",
"holdyourseed",
"shengqiang",
"takeraparterer",
"wannaphong",
"teshhh",
"victor",
"zhangliqun",
"s3nh",
"santyzenith",
"taufiqdp",
"Norod78",
"radames",
"apol",
"Etherll",
"Korakoe",
"QuocKhanh",
"lunarflu",
"osanseviero",
"sagarduwal",
"mathiasn1",
"Lin-Rexter",
"awelker",
"aaditya",
"adarshxs",
"aceeee",
"johnlockejrr",
"dillfrescott"
],
"count": 31
},
{
"reaction": "👍",
"users": [
"shengqiang",
"takeraparterer",
"topgan",
"zhangliqun",
"Elserry",
"radames",
"QuocKhanh",
"lunarflu",
"Zmu",
"Lin-Rexter",
"kramp",
"johnlockejrr",
"dillfrescott"
],
"count": 13
},
{
"reaction": "🔥",
"users": [
"holdyourseed",
"takeraparterer",
"zhangliqun",
"QuocKhanh",
"lunarflu",
"Lin-Rexter",
"dillfrescott",
"m18coppola",
"Meggido",
"erickfmm",
"varl42"
],
"count": 11
},
{
"reaction": "🤯",
"users": [
"takeraparterer",
"Karsh-CAI",
"lunarflu",
"dillfrescott"
],
"count": 4
},
{
"reaction": "➕",
"users": [
"NickyNicky",
"takeraparterer",
"lunarflu",
"dillfrescott"
],
"count": 4
}
] | 2024-05-08T00:29:02.000Z | 2024-05-23T11:18:28.915Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg",
"fullname": "Firstname Lastname",
"name": "takeraparterer",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 29,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg",
"fullname": "Mohamed Rashad",
"name": "MohamedRashad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 141,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5e4318d616b09a31220980d6/24rMJ_vPh3gW9ZEmj64xr.png",
"fullname": "Manuel Romero",
"name": "mrm8488",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 2200,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6215ce9abfcb3893344dd0a2/0srkKGjBNRDKnlMxNrsmn.jpeg",
"fullname": "Cross",
"name": "dillfrescott",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 47,
"isFollowing": false
},
{
"avatarUrl": "/avatars/8f9a41da323c71b7cc17e86a24efa2eb.svg",
"fullname": "Jayanth Sai Marreddi",
"name": "Arlendious",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/mrm8488/674895718910125 | 5,254 | 6 |
264575332864309 | [
{
"type": "text",
"value": "Question: Quantization through GPTQ ",
"raw": "Question: Quantization through GPTQ ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Hi Team, I’m trying to quantize a 13b model using the below configuration on A100. I tried the below options ",
"raw": "Hi Team, I’m trying to quantize a 13b model using the below configuration on A100. I tried the below options ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "quantization_config = GPTQConfig(",
"raw": "quantization_config = GPTQConfig(",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " bits=4,",
"raw": " bits=4,",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " group_size=128,",
"raw": " group_size=128,",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " dataset=\"wikitext2\",",
"raw": " dataset=\"wikitext2\",",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " batch_size=16,",
"raw": " batch_size=16,",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " desc_act=False",
"raw": " desc_act=False",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ")",
"raw": ")",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1. Enforce batch_size = 16 or batch_size = 2 at the quant configurations",
"raw": "1. Enforce batch_size = 16 or batch_size = 2 at the quant configurations",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "2. Set tokenizer.pad_token_id = tokenizer.eos_token_id (which is 2)",
"raw": "2. Set tokenizer.pad_token_id = tokenizer.eos_token_id (which is 2)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I observed that even if we explicitly enforce the batch size and set the pad_token_id value other than None. It is not being considered ",
"raw": "I observed that even if we explicitly enforce the batch size and set the pad_token_id value other than None. It is not being considered ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " Can’t we set the batch_size and pad_token_id to some other value is this expected behavior with GPTQ . What is the reason behind this? Please suggest if there is any way to override the batch size config.",
"raw": " Can’t we set the batch_size and pad_token_id to some other value is this expected behavior with GPTQ . What is the reason behind this? Please suggest if there is any way to override the batch size config.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/optimum/blob/main/optimum/gptq/data.py#L51",
"href": "https://github.com/huggingface/optimum/blob/main/optimum/gptq/data.py#L51",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Could you kindly suggest? Appreciate your kind support. ",
"raw": "Could you kindly suggest? Appreciate your kind support. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Thanks",
"raw": "Thanks",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Question: Quantization through GPTQ
Hi Team, I’m trying to quantize a 13b model using the below configuration on A100. I tried the below options
quantization_config = GPTQConfig(
bits=4,
group_size=128,
dataset="wikitext2",
batch_size=16,
desc_act=False
)
1. Enforce batch_size = 16 or batch_size = 2 at the quant configurations
2. Set tokenizer.pad_token_id = tokenizer.eos_token_id (which is 2)
I observed that even if we explicitly enforce the batch size and set the pad_token_id value other than None. It is not being considered
Can’t we set the batch_size and pad_token_id to some other value is this expected behavior with GPTQ . What is the reason behind this? Please suggest if there is any way to override the batch size config.
https://github.com/huggingface/optimum/blob/main/optimum/gptq/data.py#L51
Could you kindly suggest? Appreciate your kind support.
Thanks
| {
"avatarUrl": "/avatars/a8ae31b23226e85b2f3d8e1c173da5f3.svg",
"fullname": "KR",
"name": "KashyapR",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6553a92df60d0d9678f45daf/KWi__ORhiP9FmYKqKHT-_.png"
}
] | [] | [
{
"reaction": "👀",
"users": [
"victor"
],
"count": 1
}
] | 2024-05-07T22:15:38.000Z | 2024-05-08T13:59:29.886Z | [
{
"avatarUrl": "/avatars/a8ae31b23226e85b2f3d8e1c173da5f3.svg",
"fullname": "KR",
"name": "KashyapR",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/KashyapR/264575332864309 | 1,984 | 1 |
428276777817105 | [
{
"type": "text",
"value": "Introducing CosmoChat, a multiturn chat dataset based on Cosmopedia that I'm working on in the open on the Hub. ",
"raw": "Introducing CosmoChat, a multiturn chat dataset based on Cosmopedia that I'm working on in the open on the Hub. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🎯 Goals:",
"raw": "🎯 Goals:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "💬 Create multi-turn chats seeded from Cosmopedia",
"raw": "💬 Create multi-turn chats seeded from Cosmopedia",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🎓 Customize questions for different audience levels",
"raw": "🎓 Customize questions for different audience levels",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔍 Evaluate the model's ability to elaborate and clarify",
"raw": "🔍 Evaluate the model's ability to elaborate and clarify",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤓 (I want to learn more about creating valuable synthetic datasets, and I learn best by doing stuff rather than reading stuff). ",
"raw": "🤓 (I want to learn more about creating valuable synthetic datasets, and I learn best by doing stuff rather than reading stuff). ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Cosmochat is created using the excellent distilabel library.",
"raw": "Cosmochat is created using the excellent distilabel library.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔗 Explore the current version of the dataset: ",
"raw": "🔗 Explore the current version of the dataset: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/davanstrien/cosmochat",
"href": null,
"resource": {
"type": "dataset",
"id": "davanstrien/cosmochat",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/davanstrien/cosmochat",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📝 Read more: ",
"raw": "📝 Read more: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/davanstrien/cosmochat",
"href": "https://huggingface.co/blog/davanstrien/cosmochat",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Introducing CosmoChat, a multiturn chat dataset based on Cosmopedia that I'm working on in the open on the Hub.
🎯 Goals:
💬 Create multi-turn chats seeded from Cosmopedia
🎓 Customize questions for different audience levels
🔍 Evaluate the model's ability to elaborate and clarify
🤓 (I want to learn more about creating valuable synthetic datasets, and I learn best by doing stuff rather than reading stuff).
Cosmochat is created using the excellent distilabel library.
🔗 Explore the current version of the dataset: https://huggingface.co/datasets/davanstrien/cosmochat
📝 Read more: https://huggingface.co/blog/davanstrien/cosmochat | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg",
"fullname": "Daniel van Strien",
"name": "davanstrien",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 410,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"andrewrreed",
"clem",
"Locutusque",
"ucalyptus",
"Severian",
"alielfilali01"
],
"count": 6
},
{
"reaction": "👍",
"users": [
"MachineMaster",
"g-ronimo"
],
"count": 2
}
] | 2024-05-07T15:33:08.000Z | 2024-05-14T11:11:03.080Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64740cf7485a7c8e1bd51ac9/CXZCJm2x4ToT83pEIYyQR.png",
"fullname": "Beckett Dillon",
"name": "Severian",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 175,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg",
"fullname": "Daniel van Strien",
"name": "davanstrien",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 410,
"isFollowing": false
}
] | /posts/davanstrien/428276777817105 | 2,578 | 2 |
362814004058611 | [
{
"type": "text",
"value": "Question: HF model search not showing all results",
"raw": "Question: HF model search not showing all results",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I noticed that when I use the HF model search with these tags:",
"raw": "I noticed that when I use the HF model search with these tags:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- feature-extraction ",
"raw": "- feature-extraction ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- transformers.js ",
"raw": "- transformers.js ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "it is not showing all models that are actually tagged. ",
"raw": "it is not showing all models that are actually tagged. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Example: All Alibaba-NLP models (e.g. gte family) are correctly tagged but they don't show here",
"raw": "Example: All Alibaba-NLP models (e.g. gte family) are correctly tagged but they don't show here",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- ",
"raw": "- ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/models?pipeline_tag=feature-extraction&library=transformers.js&sort=trending&search=gte",
"href": "https://huggingface.co/models?pipeline_tag=feature-extraction&library=transformers.js&sort=trending&search=gte",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- correctly tagged model ",
"raw": "- correctly tagged model ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5",
"href": null,
"resource": {
"type": "model",
"id": "Alibaba-NLP/gte-large-en-v1.5",
"discussionNum": null
},
"url": "https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Does anyone know why? ",
"raw": "Does anyone know why? ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "fyi ",
"raw": "fyi ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@Xenova",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "Xenova",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Question: HF model search not showing all results
I noticed that when I use the HF model search with these tags:
- feature-extraction
- transformers.js
it is not showing all models that are actually tagged.
Example: All Alibaba-NLP models (e.g. gte family) are correctly tagged but they don't show here
- https://huggingface.co/models?pipeline_tag=feature-extraction&library=transformers.js&sort=trending&search=gte
- correctly tagged model https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5
Does anyone know why?
fyi @Xenova | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/IiercF_qxHWize2kitl9X.jpeg",
"fullname": "Dominik Weckmüller",
"name": "do-me",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 38,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png",
"fullname": "Joshua",
"name": "Xenova",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 3792
}
] | [
{
"reaction": "🔥",
"users": [
"adikur21"
],
"count": 1
}
] | 2024-05-07T11:54:49.000Z | 2024-05-07T13:40:05.722Z | [
{
"avatarUrl": "/avatars/93703e565323afcd226a76cf6baeb0f7.svg",
"fullname": "Nick Doiron",
"name": "monsoon-nlp",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 29,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/IiercF_qxHWize2kitl9X.jpeg",
"fullname": "Dominik Weckmüller",
"name": "do-me",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 38,
"isFollowing": false
}
] | /posts/do-me/362814004058611 | 2,337 | 3 |
125580095085864 | [
{
"type": "text",
"value": "Recently, the Hugging Face 🤗 datasets team met with the Language Technologies team led by Marta Villegas (",
"raw": "Recently, the Hugging Face 🤗 datasets team met with the Language Technologies team led by Marta Villegas (",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@mvillegas",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "mvillegas",
"label": null,
"lang": null
},
{
"type": "text",
"value": ") at Barcelona Supercomputing Center ",
"raw": ") at Barcelona Supercomputing Center ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@BSC-LT",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "BSC-LT",
"label": null,
"lang": null
},
{
"type": "text",
"value": ". Eager to collaborate to promote AI across Catalan, Spanish, Basque, and Galician languages and share open-source ",
"raw": ". Eager to collaborate to promote AI across Catalan, Spanish, Basque, and Galician languages and share open-source ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "datasets/models.",
"raw": "datasets/models.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " 🤝 #AI #LanguageTech #OpenSource",
"raw": " 🤝 #AI #LanguageTech #OpenSource",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Recently, the Hugging Face 🤗 datasets team met with the Language Technologies team led by Marta Villegas (@mvillegas) at Barcelona Supercomputing Center @BSC-LT. Eager to collaborate to promote AI across Catalan, Spanish, Basque, and Galician languages and share open-source datasets/models. 🤝 #AI #LanguageTech #OpenSource | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1606406298765-noauth.jpeg",
"fullname": "Albert Villanova del Moral",
"name": "albertvillanova",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 196,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5fbfd09ee366524fe8e97cd3/AzxmoDvYoOPWmmSPvm1ij.jpeg"
}
] | [
{
"avatarUrl": "/avatars/7a350649ab7261dcbcfb3df40b54d3ab.svg",
"fullname": "Marta Villegas",
"name": "mvillegas",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 6
}
] | [
{
"reaction": "👍",
"users": [
"Aaron2465",
"stevevomwege",
"ndaru12",
"lhoestq",
"Tonic",
"victor",
"Benyamin369",
"adikur21",
"not-lain",
"thomwolf",
"apol",
"Mooda2035",
"titae",
"AKOBOT",
"VictorSanh",
"jnemecek",
"PereLluis13"
],
"count": 17
},
{
"reaction": "😎",
"users": [
"severo",
"lhoestq",
"Tonic",
"monsoon-nlp",
"jeffboudier",
"not-lain",
"davanstrien",
"VictorSanh"
],
"count": 8
},
{
"reaction": "🤗",
"users": [
"lhoestq",
"Tonic",
"jeffboudier",
"not-lain"
],
"count": 4
}
] | 2024-05-07T04:57:48.000Z | 2024-05-07T20:02:35.657Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/60dc215386932230e632cdeb/7Tsyenn5aQsjgvM-JkcRu.jpeg",
"fullname": "Miguel Guerrero",
"name": "apol",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
}
] | /posts/albertvillanova/125580095085864 | 4,059 | 1 |
638013244033159 | [
{
"type": "text",
"value": "Ai splash coffee",
"raw": "Ai splash coffee",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Ai splash coffee | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/_xwHSKSCHtztulewyk24C.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/yzfct1IrLDWo37TvAJElc.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/NyVu4DIl6oHeeTSbQY0aa.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/6CPD_xOXmcddbSg1C6_lU.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/MXbQxG7FkYYIGAfAeGYXQ.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/_OqvCZCS16IrbfGl6EUqs.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/LhIqcOWrMikUGqy2fmHzw.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/TVxgVviWAp6HW7M9pa_kQ.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/utQunLQ4nQ7Xvl4dg7dTj.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/TL6mXvo2s_oNVymzOLWYM.webp"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"linxi",
"IsmaelMousa",
"EloyOn",
"Benyamin369",
"adikur21",
"belyakoff",
"AtAndDev",
"faisalbsl21",
"dillfrescott",
"Ramikan-BR"
],
"count": 10
},
{
"reaction": "👍",
"users": [
"boarder",
"Aaron2465",
"Noomam",
"AtAndDev",
"dillfrescott"
],
"count": 5
},
{
"reaction": "❤️",
"users": [
"dillfrescott"
],
"count": 1
}
] | 2024-05-07T01:48:34.000Z | 2024-05-07T01:48:34.028Z | [] | /posts/phenixrhyder/638013244033159 | 2,682 | 0 |
702249780017820 | [
{
"type": "text",
"value": "Audio transcription is one of the most useful use cases for journalists (and many other professions!). ",
"raw": "Audio transcription is one of the most useful use cases for journalists (and many other professions!). ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@sergeipetrov",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "sergeipetrov",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", ",
"raw": ", ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@reach-vb",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "reach-vb",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", ",
"raw": ", ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@pcuenq",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "pcuenq",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", and ",
"raw": ", and ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@philschmid",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "philschmid",
"label": null,
"lang": null
},
{
"type": "text",
"value": " have created an optimized Whisper with Speaker Diarization for ",
"raw": " have created an optimized Whisper with Speaker Diarization for ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@huggingface",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "huggingface",
"label": null,
"lang": null
},
{
"type": "text",
"value": " Inference Endpoints—definitely worth a read!",
"raw": " Inference Endpoints—definitely worth a read!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Check out their blog post here: ",
"raw": "Check out their blog post here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/asr-diarization",
"href": "https://huggingface.co/blog/asr-diarization",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can find the notebook here: ",
"raw": "You can find the notebook here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/sergeipetrov/asrdiarization-handler/blob/main/handler.py",
"href": null,
"resource": {
"type": "model",
"id": "sergeipetrov/asrdiarization-handler",
"discussionNum": null
},
"url": "https://huggingface.co/sergeipetrov/asrdiarization-handler/blob/main/handler.py",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Audio transcription is one of the most useful use cases for journalists (and many other professions!). @sergeipetrov, @reach-vb, @pcuenq, and @philschmid have created an optimized Whisper with Speaker Diarization for @huggingface Inference Endpoints—definitely worth a read!
Check out their blog post here: https://huggingface.co/blog/asr-diarization
You can find the notebook here: https://huggingface.co/sergeipetrov/asrdiarization-handler/blob/main/handler.py | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1617264212503-603d25b75f9d390ab190b777.jpeg",
"fullname": "Pedro Cuenca",
"name": "pcuenq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 444
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1624629516652-5ff5d596f244529b3ec0fb89.png",
"fullname": "Philipp Schmid",
"name": "philschmid",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 657
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1655385361868-61b85ce86eb1f2c5e6233736.jpeg",
"fullname": "Vaibhav Srivastav",
"name": "reach-vb",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 460
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D78gS9F1gE6mwdbpyzT5K.jpeg",
"fullname": "Sergei Petrov",
"name": "sergeipetrov",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 42
}
] | [
{
"reaction": "👍",
"users": [
"Aaron2465",
"BrigitteTousi"
],
"count": 2
}
] | 2024-05-07T01:12:37.000Z | 2024-05-07T01:12:37.507Z | [] | /posts/fdaudens/702249780017820 | 2,311 | 0 |
642770739646573 | [
{
"type": "text",
"value": "Sparse Concept Bottleneck Models: Gumbel Tricks in Contrastive Learning",
"raw": "Sparse Concept Bottleneck Models: Gumbel Tricks in Contrastive Learning",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Paper: ",
"raw": "Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.03323",
"href": null,
"resource": {
"type": "paper",
"id": "2404.03323",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.03323",
"code": null,
"user": null,
"label": "Sparse Concept Bottleneck Models: Gumbel Tricks in Contrastive Learning (2404.03323)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Authors propose a novel architecture and method of explainable classification with Concept Bottleneck Models (CBMs): they introduce a new type of layers known as Concept Bottleneck Layers (CBL), and present three methods for training them: with $\\ell_1$-loss, contrastive loss and loss function based on Gumbel-Softmax distribution (Sparse-CBM), while final FC layer is still trained with Cross-Entropy. They show a significant increase in accuracy using sparse hidden layers in CLIP-based bottleneck models. Which means that sparse representation of concepts activation vector is meaningful in Concept Bottleneck Models. ",
"raw": "Authors propose a novel architecture and method of explainable classification with Concept Bottleneck Models (CBMs): they introduce a new type of layers known as Concept Bottleneck Layers (CBL), and present three methods for training them: with $\\ell_1$-loss, contrastive loss and loss function based on Gumbel-Softmax distribution (Sparse-CBM), while final FC layer is still trained with Cross-Entropy. They show a significant increase in accuracy using sparse hidden layers in CLIP-based bottleneck models. Which means that sparse representation of concepts activation vector is meaningful in Concept Bottleneck Models. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Key concepts:",
"raw": "Key concepts:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "– Contrastive Gumbel-Softmax loss: the first contrastive variant of Gumbel-Softmax objective which achieves an inner sparse representation of the Concept Bottleneck Layer activations. ",
"raw": "– Contrastive Gumbel-Softmax loss: the first contrastive variant of Gumbel-Softmax objective which achieves an inner sparse representation of the Concept Bottleneck Layer activations. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "– Sparse $\\ell_1$ regularization.",
"raw": "– Sparse $\\ell_1$ regularization.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "– Contrastive loss for inner layers of the model.",
"raw": "– Contrastive loss for inner layers of the model.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Methodology:",
"raw": "Methodology:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The approach consists of three main steps:",
"raw": "The approach consists of three main steps:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "– Create a set of concepts based on the labels of the dataset.",
"raw": "– Create a set of concepts based on the labels of the dataset.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "– Supply a multi-modal encoder with CBL.",
"raw": "– Supply a multi-modal encoder with CBL.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "– Train this CBL with the picked objective function and train the classifiers head with Cross-Entropy.",
"raw": "– Train this CBL with the picked objective function and train the classifiers head with Cross-Entropy.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Results and Analysis:",
"raw": "Results and Analysis:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The methodology can be applied to the task of interpreted image classification. And the experimental results show the superiority of using sparse hidden representations of concepts.",
"raw": "The methodology can be applied to the task of interpreted image classification. And the experimental results show the superiority of using sparse hidden representations of concepts.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Sparse Concept Bottleneck Models: Gumbel Tricks in Contrastive Learning
Paper: https://huggingface.co/papers/2404.03323
Authors propose a novel architecture and method of explainable classification with Concept Bottleneck Models (CBMs): they introduce a new type of layers known as Concept Bottleneck Layers (CBL), and present three methods for training them: with $\ell_1$-loss, contrastive loss and loss function based on Gumbel-Softmax distribution (Sparse-CBM), while final FC layer is still trained with Cross-Entropy. They show a significant increase in accuracy using sparse hidden layers in CLIP-based bottleneck models. Which means that sparse representation of concepts activation vector is meaningful in Concept Bottleneck Models.
Key concepts:
– Contrastive Gumbel-Softmax loss: the first contrastive variant of Gumbel-Softmax objective which achieves an inner sparse representation of the Concept Bottleneck Layer activations.
– Sparse $\ell_1$ regularization.
– Contrastive loss for inner layers of the model.
Methodology:
The approach consists of three main steps:
– Create a set of concepts based on the labels of the dataset.
– Supply a multi-modal encoder with CBL.
– Train this CBL with the picked objective function and train the classifiers head with Cross-Entropy.
Results and Analysis:
The methodology can be applied to the task of interpreted image classification. And the experimental results show the superiority of using sparse hidden representations of concepts.
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/iOjE2dcSUS-XYE0WJe6U8.jpeg",
"fullname": "Andrei Semenov",
"name": "Andron00e",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/MDP2v1YvH0_AjcGi6LQye.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/oBCJ-4Les-QuV994todKz.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/MbnEYKNlc9SBfy42dZVlD.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/wLERu_1LZgTlxwvvzMBFc.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/-K9wUepKJcWsbFHJejg54.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/D28Non069ASoy6P5KUTck.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6479f8335f3450e1ded40774/nQdJQmkohvBdCVtO7qGm4.png"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"Andron00e",
"tit33",
"EffyOsvin"
],
"count": 3
},
{
"reaction": "👀",
"users": [
"Andron00e",
"EffyOsvin"
],
"count": 2
},
{
"reaction": "🔥",
"users": [
"EffyOsvin"
],
"count": 1
}
] | 2024-05-07T00:16:02.000Z | 2024-05-07T00:16:33.196Z | [] | /posts/Andron00e/642770739646573 | 2,018 | 0 |
989801342899057 | [
{
"type": "text",
"value": "🔬 Open LLM Progress Tracker 🔬",
"raw": "🔬 Open LLM Progress Tracker 🔬",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Inspired by the awesome work from ",
"raw": "Inspired by the awesome work from ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@mlabonne",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "mlabonne",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", I created a Space to monitor the narrowing gap between open and proprietary LLMs as scored by the LMSYS Chatbot Arena ELO ratings 🤗",
"raw": ", I created a Space to monitor the narrowing gap between open and proprietary LLMs as scored by the LMSYS Chatbot Arena ELO ratings 🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The goal is to have a continuously updated place to easily visualize these rapidly evolving industry trends 🚀",
"raw": "The goal is to have a continuously updated place to easily visualize these rapidly evolving industry trends 🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔗 Open LLM Progress Tracker: ",
"raw": "🔗 Open LLM Progress Tracker: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/andrewrreed/closed-vs-open-arena-elo",
"href": null,
"resource": {
"type": "space",
"id": "andrewrreed/closed-vs-open-arena-elo",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/andrewrreed/closed-vs-open-arena-elo",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔗 Source of Inspiration: ",
"raw": "🔗 Source of Inspiration: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.linkedin.com/posts/maxime-labonne_arena-elo-graph-updated-with-new-models-activity-7187062633735368705-u2jB/",
"href": "https://www.linkedin.com/posts/maxime-labonne_arena-elo-graph-updated-with-new-models-activity-7187062633735368705-u2jB/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🔬 Open LLM Progress Tracker 🔬
Inspired by the awesome work from @mlabonne, I created a Space to monitor the narrowing gap between open and proprietary LLMs as scored by the LMSYS Chatbot Arena ELO ratings 🤗
The goal is to have a continuously updated place to easily visualize these rapidly evolving industry trends 🚀
🔗 Open LLM Progress Tracker: https://huggingface.co/spaces/andrewrreed/closed-vs-open-arena-elo
🔗 Source of Inspiration: https://www.linkedin.com/posts/maxime-labonne_arena-elo-graph-updated-with-new-models-activity-7187062633735368705-u2jB/ | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61d375fd733d3a83ecd1bba9/oIXwvvs1-HaCnJXMCZgkc.jpeg",
"fullname": "Andrew Reed",
"name": "andrewrreed",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 106,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/61d375fd733d3a83ecd1bba9/fTLkXG5es49cj_m_H-rhR.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/JtUGAwVh_4cDEsjNcfpye.png",
"fullname": "Maxime Labonne",
"name": "mlabonne",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3486
}
] | [
{
"reaction": "❤️",
"users": [
"mlabonne",
"kristaller486",
"DDJN-data-science",
"NePe",
"m-ric",
"victor",
"rennokki",
"radames",
"nachoyawn",
"ucyang",
"clboetticher",
"Optimisuke"
],
"count": 12
},
{
"reaction": "🔥",
"users": [
"pakkinlau",
"m-ric",
"victor",
"DimaGM",
"Optimisuke",
"jeffboudier"
],
"count": 6
}
] | 2024-05-06T22:45:14.000Z | 2024-05-07T10:44:10.197Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/JtUGAwVh_4cDEsjNcfpye.png",
"fullname": "Maxime Labonne",
"name": "mlabonne",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3486,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61d375fd733d3a83ecd1bba9/oIXwvvs1-HaCnJXMCZgkc.jpeg",
"fullname": "Andrew Reed",
"name": "andrewrreed",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 106,
"isFollowing": false
}
] | /posts/andrewrreed/989801342899057 | 2,454 | 2 |
917721899427164 | [
{
"type": "mention",
"value": null,
"raw": "@llmixer",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "llmixer",
"label": null,
"lang": null
},
{
"type": "text",
"value": " is doing g-d's work by orthogonalizing weakness out of free-thinking models, well-done ",
"raw": " is doing g-d's work by orthogonalizing weakness out of free-thinking models, well-done ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@llmixer",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "llmixer",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | @llmixer is doing g-d's work by orthogonalizing weakness out of free-thinking models, well-done @llmixer | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62e33241e5431c5d1ad3a6f0/ReToPZt5X17cXyWQDukdy.png",
"fullname": "Barton Rhodes",
"name": "bmorphism",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 11,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65a6db055c58475cf9e6def1/0FU61p55f_3AOGjjGa4Vq.png",
"fullname": "llmixer",
"name": "llmixer",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 20
}
] | [
{
"reaction": "🔥",
"users": [
"tit33",
"matinla"
],
"count": 2
},
{
"reaction": "❤️",
"users": [
"matinla"
],
"count": 1
}
] | 2024-05-06T22:30:21.000Z | 2024-05-06T22:30:21.217Z | [] | /posts/bmorphism/917721899427164 | 1,690 | 0 |
489135232540079 | [
{
"type": "text",
"value": "We release Idefics2-chatty, the chatbot-optimized version of Idefics2: ",
"raw": "We release Idefics2-chatty, the chatbot-optimized version of Idefics2: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty",
"href": null,
"resource": {
"type": "model",
"id": "HuggingFaceM4/idefics2-8b-chatty",
"discussionNum": null
},
"url": "https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Idefics2-chatty is better at following instructions and following Chain-of-Thoughts reasoning.",
"raw": "Idefics2-chatty is better at following instructions and following Chain-of-Thoughts reasoning.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Moreover, we also release a paper, containing a lot of findings on how to build an efficient and performant Vision-Language Model: ",
"raw": "Moreover, we also release a paper, containing a lot of findings on how to build an efficient and performant Vision-Language Model: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2405.02246",
"href": null,
"resource": {
"type": "paper",
"id": "2405.02246",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2405.02246",
"code": null,
"user": null,
"label": "What matters when building vision-language models? (2405.02246)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "How are you going to use the model, or what data are you going to fine-tune it on?",
"raw": "How are you going to use the model, or what data are you going to fine-tune it on?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | We release Idefics2-chatty, the chatbot-optimized version of Idefics2: https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty
Idefics2-chatty is better at following instructions and following Chain-of-Thoughts reasoning.
Moreover, we also release a paper, containing a lot of findings on how to build an efficient and performant Vision-Language Model: https://huggingface.co/papers/2405.02246
How are you going to use the model, or what data are you going to fine-tune it on? | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1635201569275-noauth.jpeg",
"fullname": "Hugo Laurençon",
"name": "HugoLaurencon",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 146,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6177322d37f32ecb1e2d4cdf/-nbfSXtCfEicQiC_MyCt5.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6177322d37f32ecb1e2d4cdf/doYjfXf4HYtIdcIRB4taG.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6177322d37f32ecb1e2d4cdf/be-3CjeT7BSiBIt_l9M31.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6177322d37f32ecb1e2d4cdf/kOP6Mv2SqMfx7CaRzWZQ1.png"
}
] | [] | [
{
"reaction": "❤️",
"users": [
"yhaj1017",
"zafar5783",
"0xgimic",
"pedi",
"VictorSanh",
"DmitryRyumin",
"Nazikkrs",
"samusenps",
"DongfuJiang",
"victor",
"ahmetozkann",
"tit33",
"Eyel",
"louisbrulenaudet",
"aloobun",
"osanseviero",
"Lewdiculous",
"muhtasham"
],
"count": 18
},
{
"reaction": "🚀",
"users": [
"yhaj1017",
"zafar5783",
"VictorSanh",
"DmitryRyumin",
"samusenps",
"DongfuJiang",
"victor",
"ahmetozkann",
"Lewdiculous",
"muhtasham"
],
"count": 10
},
{
"reaction": "🔥",
"users": [
"samusenps",
"ahmedzx",
"DongfuJiang",
"victor"
],
"count": 4
}
] | 2024-05-06T15:47:23.000Z | 2024-06-18T09:30:26.488Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/649935abbe8fd92c27ab1ed8/ueWnaZtJa-oWpzupP6FV8.png",
"fullname": "David Leon",
"name": "DavidLeon",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1635201569275-noauth.jpeg",
"fullname": "Hugo Laurençon",
"name": "HugoLaurencon",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 146,
"isFollowing": false
},
{
"avatarUrl": "/avatars/151bbea6cbb8ce3ac51e5b3afc88ea8b.svg",
"fullname": "baes",
"name": "obaes",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/HugoLaurencon/489135232540079 | 2,846 | 5 |
470080228607147 | [
{
"type": "text",
"value": "💬🔥Releasing idefics2-8b-chatty, the chat-optimized version of Idefics2!",
"raw": "💬🔥Releasing idefics2-8b-chatty, the chat-optimized version of Idefics2!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It is a very efficient (8B parameters) state-of-the-art VLM, has been red-teamed, and comes with a few surprises:",
"raw": "It is a very efficient (8B parameters) state-of-the-art VLM, has been red-teamed, and comes with a few surprises:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 📖Paper dissecting a lot of the experimental insights we learned building Idefics2:",
"raw": "- 📖Paper dissecting a lot of the experimental insights we learned building Idefics2:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 🏎️TGI integration for blazing-fast inference (you can already run it locally with < 24GB GPU memory)",
"raw": "- 🏎️TGI integration for blazing-fast inference (you can already run it locally with < 24GB GPU memory)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 🏆 Ranking 2nd in its category (< 10B, open weights) in the awesome Open VLM Leaderboard, and now appearing in the incredible Vision Arena",
"raw": "- 🏆 Ranking 2nd in its category (< 10B, open weights) in the awesome Open VLM Leaderboard, and now appearing in the incredible Vision Arena",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Ressources:",
"raw": "Ressources:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "⏯️Playground: ",
"raw": "⏯️Playground: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/HuggingFaceM4/idefics2_playground",
"href": null,
"resource": {
"type": "space",
"id": "HuggingFaceM4/idefics2_playground",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/HuggingFaceM4/idefics2_playground",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📖Paper: ",
"raw": "📖Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2405.02246",
"href": null,
"resource": {
"type": "paper",
"id": "2405.02246",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2405.02246",
"code": null,
"user": null,
"label": "What matters when building vision-language models? (2405.02246)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🏋️♂️Model and red-teaming analysis: ",
"raw": "🏋️♂️Model and red-teaming analysis: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty",
"href": null,
"resource": {
"type": "model",
"id": "HuggingFaceM4/idefics2-8b-chatty",
"discussionNum": null
},
"url": "https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👀Ressources to get started: ",
"raw": "👀Ressources to get started: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty#how-to-get-started",
"href": null,
"resource": {
"type": "model",
"id": "HuggingFaceM4/idefics2-8b-chatty",
"discussionNum": null
},
"url": "https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty#how-to-get-started",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🏆Open VLM Leaderboard: ",
"raw": "🏆Open VLM Leaderboard: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/opencompass/open_vlm_leaderboard",
"href": null,
"resource": {
"type": "space",
"id": "opencompass/open_vlm_leaderboard",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/opencompass/open_vlm_leaderboard",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🏟️Vision arena: ",
"raw": "🏟️Vision arena: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/WildVision/vision-arena",
"href": null,
"resource": {
"type": "space",
"id": "WildVision/vision-arena",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/WildVision/vision-arena",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 💬🔥Releasing idefics2-8b-chatty, the chat-optimized version of Idefics2!
It is a very efficient (8B parameters) state-of-the-art VLM, has been red-teamed, and comes with a few surprises:
- 📖Paper dissecting a lot of the experimental insights we learned building Idefics2:
- 🏎️TGI integration for blazing-fast inference (you can already run it locally with < 24GB GPU memory)
- 🏆 Ranking 2nd in its category (< 10B, open weights) in the awesome Open VLM Leaderboard, and now appearing in the incredible Vision Arena
Ressources:
⏯️Playground: https://huggingface.co/spaces/HuggingFaceM4/idefics2_playground
📖Paper: https://huggingface.co/papers/2405.02246
🏋️♂️Model and red-teaming analysis: https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty
👀Ressources to get started: https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty#how-to-get-started
🏆Open VLM Leaderboard: https://huggingface.co/spaces/opencompass/open_vlm_leaderboard
🏟️Vision arena: https://huggingface.co/spaces/WildVision/vision-arena | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1619623771844-5ecea265968f6028e0559fa5.jpeg",
"fullname": "Victor Sanh",
"name": "VictorSanh",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 206,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5ecea265968f6028e0559fa5/dfLLxXzCVh6e_Kfp1OkCR.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"HugoLaurencon",
"Srikanth492",
"FLOGFI",
"yhaj1017",
"zafar5783",
"pedi",
"Nazikkrs",
"ShahabVFX",
"mrslimbine",
"Eyel",
"osanseviero"
],
"count": 11
},
{
"reaction": "🤯",
"users": [
"awacke1"
],
"count": 1
},
{
"reaction": "🧠",
"users": [
"awacke1"
],
"count": 1
}
] | 2024-05-06T15:33:20.000Z | 2024-06-17T01:24:32.282Z | [
{
"avatarUrl": "/avatars/9cf1898b538cc4e6a5100475877c16e0.svg",
"fullname": "Manu",
"name": "suryavanshi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/VictorSanh/470080228607147 | 2,766 | 1 |
339322634198020 | [
{
"type": "text",
"value": "NEW publishing hub for trending text model academic research papers broken down into open source technical blog posts ➡️ ",
"raw": "NEW publishing hub for trending text model academic research papers broken down into open source technical blog posts ➡️ ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://textmodels.tech",
"href": "https://textmodels.tech",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | NEW publishing hub for trending text model academic research papers broken down into open source technical blog posts ➡️ https://textmodels.tech | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64862a25cf5ad5e1f0482ef2/61qPUtw9jIl7zpPYmi0VW.jpeg",
"fullname": "David Smooke",
"name": "Smooke",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 43,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64862a25cf5ad5e1f0482ef2/JFRJqs-9bZZfKQAueCBUQ.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"andr3weth",
"jheza30",
"boksman",
"Ndonda871",
"nivram189",
"kooshi",
"FLOGFI",
"pedi",
"ShahabVFX",
"ahmedzx",
"Aaron2465"
],
"count": 11
},
{
"reaction": "🚀",
"users": [
"andr3weth",
"jheza30",
"Ndonda871",
"nivram189",
"Lalush",
"pedi",
"deamyasin",
"ahmedzx"
],
"count": 8
},
{
"reaction": "❤️",
"users": [
"tit33",
"rinoa"
],
"count": 2
}
] | 2024-05-06T14:28:13.000Z | 2024-05-06T14:28:13.613Z | [] | /posts/Smooke/339322634198020 | 1,815 | 0 |
561309074433333 | [
{
"type": "text",
"value": "This dataset is cool:",
"raw": "This dataset is cool:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/MohamedRashad/midjourney-detailed-prompts",
"href": null,
"resource": {
"type": "dataset",
"id": "MohamedRashad/midjourney-detailed-prompts",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/MohamedRashad/midjourney-detailed-prompts",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | This dataset is cool:
https://huggingface.co/datasets/MohamedRashad/midjourney-detailed-prompts | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg",
"fullname": "Mohamed Rashad",
"name": "MohamedRashad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 141,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🤯",
"users": [
"kelork",
"niwram69",
"boksman",
"Ndonda871",
"nivram189",
"kooshi",
"FLOGFI",
"pedi",
"ahmedzx"
],
"count": 9
},
{
"reaction": "🤗",
"users": [
"Shunchan666",
"Ndonda871",
"nivram189",
"Lalush",
"pedi",
"ahmedzx"
],
"count": 6
},
{
"reaction": "❤️",
"users": [
"tit33"
],
"count": 1
}
] | 2024-05-06T10:22:31.000Z | 2024-05-06T20:00:48.824Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6638fd0f0419b502169e1e4f/D05Z0lIcowX9rKKElxXhO.jpeg",
"fullname": "zafar Iqbal",
"name": "zafar5783",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "/avatars/09d248a6f220819763c810a73a324e8c.svg",
"fullname": "rshee",
"name": "pedi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
}
] | /posts/MohamedRashad/561309074433333 | 2,292 | 5 |
723432044074641 | [
{
"type": "text",
"value": "I've just stumbled upon some excellent work on (🇫🇷 French) retrieval models by ",
"raw": "I've just stumbled upon some excellent work on (🇫🇷 French) retrieval models by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@antoinelouis",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "antoinelouis",
"label": null,
"lang": null
},
{
"type": "text",
"value": ". Kudos to him! ",
"raw": ". Kudos to him! ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- French Embedding Models: ",
"raw": "- French Embedding Models: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/collections/antoinelouis/dense-single-vector-bi-encoders-651523c0c75a3d4c44fc864d",
"href": "https://huggingface.co/collections/antoinelouis/dense-single-vector-bi-encoders-651523c0c75a3d4c44fc864d",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- French Reranker Models: ",
"raw": "- French Reranker Models: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/collections/antoinelouis/cross-encoder-rerankers-651523f16efa656d1788a239",
"href": null,
"resource": {
"type": "collection",
"id": "antoinelouis/cross-encoder-rerankers-651523f16efa656d1788a239",
"discussionNum": null
},
"url": "https://huggingface.co/collections/antoinelouis/cross-encoder-rerankers-651523f16efa656d1788a239",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- French Multi-vector Models: ",
"raw": "- French Multi-vector Models: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/collections/antoinelouis/dense-multi-vector-bi-encoders-6589a8ee6b17c06872e9f075",
"href": "https://huggingface.co/collections/antoinelouis/dense-multi-vector-bi-encoders-6589a8ee6b17c06872e9f075",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Multilingual Models: ",
"raw": "- Multilingual Models: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/collections/antoinelouis/modular-retrievers-65d53d0db64b1d644aea620c",
"href": "https://huggingface.co/collections/antoinelouis/modular-retrievers-65d53d0db64b1d644aea620c",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "A lot of these models use the MS MARCO Hard Negatives dataset, which I'm currently reformatting to be more easily usable. Notably, they should work out of the box without any pre-processing for training embedding models in the upcoming Sentence Transformers v3.",
"raw": "A lot of these models use the MS MARCO Hard Negatives dataset, which I'm currently reformatting to be more easily usable. Notably, they should work out of the box without any pre-processing for training embedding models in the upcoming Sentence Transformers v3.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I've just stumbled upon some excellent work on (🇫🇷 French) retrieval models by @antoinelouis. Kudos to him!
- French Embedding Models: https://huggingface.co/collections/antoinelouis/dense-single-vector-bi-encoders-651523c0c75a3d4c44fc864d
- French Reranker Models: https://huggingface.co/collections/antoinelouis/cross-encoder-rerankers-651523f16efa656d1788a239
- French Multi-vector Models: https://huggingface.co/collections/antoinelouis/dense-multi-vector-bi-encoders-6589a8ee6b17c06872e9f075
- Multilingual Models: https://huggingface.co/collections/antoinelouis/modular-retrievers-65d53d0db64b1d644aea620c
A lot of these models use the MS MARCO Hard Negatives dataset, which I'm currently reformatting to be more easily usable. Notably, they should work out of the box without any pre-processing for training embedding models in the upcoming Sentence Transformers v3. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png",
"fullname": "Tom Aarsen",
"name": "tomaarsen",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 1060,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1600354981837-5f298d07e923d665e6162802.jpeg",
"fullname": "Antoine Louis",
"name": "antoinelouis",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 37
}
] | [
{
"reaction": "🔥",
"users": [
"giux78",
"ayush7",
"jdagh",
"Ndonda871",
"pedi",
"ShahabVFX",
"osanseviero"
],
"count": 7
},
{
"reaction": "🤗",
"users": [
"antoinelouis",
"Ndonda871",
"pedi"
],
"count": 3
},
{
"reaction": "😎",
"users": [
"Ramayanti",
"pedi"
],
"count": 2
},
{
"reaction": "🤯",
"users": [
"kelork"
],
"count": 1
}
] | 2024-05-06T07:56:11.000Z | 2024-05-06T07:57:24.861Z | [] | /posts/tomaarsen/723432044074641 | 2,734 | 0 |
333692149429658 | [
{
"type": "mention",
"value": null,
"raw": "@rubend18",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "rubend18",
"label": null,
"lang": null
},
{
"type": "text",
"value": " Hello Ruben, I have been trying to manipulate my Andriod and other phones for some time. Friends always ask, its a hit or miss. May I have access to your repo on the jailbreaking please. ",
"raw": " Hello Ruben, I have been trying to manipulate my Andriod and other phones for some time. Friends always ask, its a hit or miss. May I have access to your repo on the jailbreaking please. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | @rubend18 Hello Ruben, I have been trying to manipulate my Andriod and other phones for some time. Friends always ask, its a hit or miss. May I have access to your repo on the jailbreaking please.
| {
"avatarUrl": "/avatars/85b14360dfc6d4ac970f3e2bcbb14e00.svg",
"fullname": "Christopher Salazar",
"name": "werewolf5",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1677074181724-noauth.jpeg",
"fullname": "Rubén Darío Jaramillo Romero",
"name": "rubend18",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 6
}
] | [
{
"reaction": "🔥",
"users": [
"kelork",
"Ndonda871",
"kooshi",
"pedi",
"ShahabVFX",
"Dariabaraeva"
],
"count": 6
}
] | 2024-05-05T22:03:21.000Z | 2024-05-06T15:34:23.920Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/663416c25806f759bfafdcec/SWeq6bW0AfDdLm_QV_mqg.png",
"fullname": "Everett Smith",
"name": "EvvyDZero1",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61766719596f673069f64dbc/n9JgviFMfhcoPr9Swb500.png",
"fullname": "Akash Singh",
"name": "akashicmarga",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 11,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6638f0c67256327c68643c3a/2E3HZkdMgcASnUcgp2SNB.jpeg",
"fullname": "kooshi nare",
"name": "kooshi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/werewolf5/333692149429658 | 2,619 | 3 |
802475347987275 | [
{
"type": "text",
"value": "🚀🎭🌟 New Research Alert - SIGGRAPH 2024 (Avatars Collection)! 🌟🎭🚀",
"raw": "🚀🎭🌟 New Research Alert - SIGGRAPH 2024 (Avatars Collection)! 🌟🎭🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📄 Title: 3D Gaussian Blendshapes for Head Avatar Animation 🔝",
"raw": "📄 Title: 3D Gaussian Blendshapes for Head Avatar Animation 🔝",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📝 Description: 3D Gaussian Blendshapes for Head Avatar Animation is a novel method for modeling and animating photorealistic head avatars from monocular video input.",
"raw": "📝 Description: 3D Gaussian Blendshapes for Head Avatar Animation is a novel method for modeling and animating photorealistic head avatars from monocular video input.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👥 Authors: Shengjie Ma, Yanlin Weng, Tianjia Shao, and Kun Zhou",
"raw": "👥 Authors: Shengjie Ma, Yanlin Weng, Tianjia Shao, and Kun Zhou",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📅 Conference: SIGGRAPH, 28 Jul – 1 Aug, 2024 | Denver CO, USA 🇺🇸",
"raw": "📅 Conference: SIGGRAPH, 28 Jul – 1 Aug, 2024 | Denver CO, USA 🇺🇸",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📄 Paper: ",
"raw": "📄 Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.19398",
"href": null,
"resource": {
"type": "paper",
"id": "2404.19398",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.19398",
"code": null,
"user": null,
"label": "3D Gaussian Blendshapes for Head Avatar Animation (2404.19398)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌐 Github Page: ",
"raw": "🌐 Github Page: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://gapszju.github.io/GaussianBlendshape/",
"href": "https://gapszju.github.io/GaussianBlendshape/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📚 More Papers: more cutting-edge research presented at other conferences in the ",
"raw": "📚 More Papers: more cutting-edge research presented at other conferences in the ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers",
"href": null,
"resource": {
"type": "space",
"id": "DmitryRyumin/NewEraAI-Papers",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " curated by ",
"raw": " curated by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@DmitryRyumin",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "DmitryRyumin",
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀 Added to the Avatars Collection: ",
"raw": "🚀 Added to the Avatars Collection: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36",
"href": null,
"resource": {
"type": "collection",
"id": "DmitryRyumin/avatars-65df37cdf81fec13d4dbac36",
"discussionNum": null
},
"url": "https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔍 Keywords: #3DAnimation #HeadAvatar #GaussianBlendshapes #FacialAnimation #RealTimeRendering #SIGGRAPH2024 #ComputerGraphics #DeepLearning #ComputerVision #Innovation",
"raw": "🔍 Keywords: #3DAnimation #HeadAvatar #GaussianBlendshapes #FacialAnimation #RealTimeRendering #SIGGRAPH2024 #ComputerGraphics #DeepLearning #ComputerVision #Innovation",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀🎭🌟 New Research Alert - SIGGRAPH 2024 (Avatars Collection)! 🌟🎭🚀
📄 Title: 3D Gaussian Blendshapes for Head Avatar Animation 🔝
📝 Description: 3D Gaussian Blendshapes for Head Avatar Animation is a novel method for modeling and animating photorealistic head avatars from monocular video input.
👥 Authors: Shengjie Ma, Yanlin Weng, Tianjia Shao, and Kun Zhou
📅 Conference: SIGGRAPH, 28 Jul – 1 Aug, 2024 | Denver CO, USA 🇺🇸
📄 Paper: https://huggingface.co/papers/2404.19398
🌐 Github Page: https://gapszju.github.io/GaussianBlendshape/
📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin
🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36
🔍 Keywords: #3DAnimation #HeadAvatar #GaussianBlendshapes #FacialAnimation #RealTimeRendering #SIGGRAPH2024 #ComputerGraphics #DeepLearning #ComputerVision #Innovation | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg",
"fullname": "Dmitry Ryumin",
"name": "DmitryRyumin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 377,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/_OAWinmE0fYXKa50FuQel.mp4"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/YE5PWqGqF81Eu2ay06yth.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/N9mtrvNtScw1XbVnRkLMT.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/mlfpFebTip5DFvgcKF-J2.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/l4qEVgic0Kjl2RfSTk993.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/zOJyIK5mKU_g3MzL-eLIQ.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/H8MC6j1l6d1x-6wiMbIm2.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/ypfkrm3Q54bGTOofPMaZf.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg",
"fullname": "Dmitry Ryumin",
"name": "DmitryRyumin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 377
}
] | [
{
"reaction": "🔥",
"users": [
"DmitryRyumin",
"Ramayanti",
"kelork",
"boksman",
"Ndonda871",
"pedi",
"ShahabVFX",
"ahmedzx",
"afoam",
"thomwolf",
"danielus"
],
"count": 11
}
] | 2024-05-05T21:07:56.000Z | 2024-05-05T21:11:02.615Z | [] | /posts/DmitryRyumin/802475347987275 | 2,650 | 0 |
547379630989113 | [
{
"type": "text",
"value": "🔥 Did you know that you can try out Play.HT 2.0 and OpenVoice V2 on the TTS Arena for free?",
"raw": "🔥 Did you know that you can try out Play.HT 2.0 and OpenVoice V2 on the TTS Arena for free?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Enter text and vote on which model is superior!",
"raw": "Enter text and vote on which model is superior!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/TTS-AGI/TTS-Arena",
"href": null,
"resource": {
"type": "space",
"id": "TTS-AGI/TTS-Arena",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/TTS-AGI/TTS-Arena",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🔥 Did you know that you can try out Play.HT 2.0 and OpenVoice V2 on the TTS Arena for free?
Enter text and vote on which model is superior!
https://huggingface.co/spaces/TTS-AGI/TTS-Arena | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png",
"fullname": "mrfakename",
"name": "mrfakename",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 969,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"pedi",
"ShahabVFX",
"Gatozu35",
"victor"
],
"count": 5
}
] | 2024-05-05T19:30:52.000Z | 2024-05-05T19:31:09.621Z | [] | /posts/mrfakename/547379630989113 | 2,474 | 0 |
679817419734518 | [
{
"type": "text",
"value": "Chemllm.org Now transfered to ChemLLM-20B-DPO, Have a try now!🤗",
"raw": "Chemllm.org Now transfered to ChemLLM-20B-DPO, Have a try now!🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Chemllm.org Now transfered to ChemLLM-20B-DPO, Have a try now!🤗 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/bQFX1iFbXEBXcQvUNL811.png",
"fullname": "Di Zhang",
"name": "qq8933",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 108,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"pedi",
"ShahabVFX",
"dillfrescott",
"rbiswasfc"
],
"count": 4
},
{
"reaction": "🚀",
"users": [
"pedi",
"deamyasin",
"dillfrescott"
],
"count": 3
}
] | 2024-05-05T16:34:42.000Z | 2024-05-05T16:34:42.123Z | [] | /posts/qq8933/679817419734518 | 2,015 | 0 |
833340983098067 | [
{
"type": "text",
"value": "mlx_micrograd - mlx port of Karpathy's micrograd- a tiny scalar-valued autograd engine with a small PyTorch-like neural network library on top.",
"raw": "mlx_micrograd - mlx port of Karpathy's micrograd- a tiny scalar-valued autograd engine with a small PyTorch-like neural network library on top.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/Jaykef/mlx_micrograd",
"href": "https://github.com/Jaykef/mlx_micrograd",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Installation",
"raw": "Installation",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "code_fence",
"value": null,
"raw": "```bash\npip install mlx_micrograd\n```",
"href": null,
"resource": null,
"url": null,
"code": "pip install mlx_micrograd",
"user": null,
"label": null,
"lang": "bash"
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Example usage",
"raw": "Example usage",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Example showing a number of possible supported operations:",
"raw": "Example showing a number of possible supported operations:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "code_fence",
"value": null,
"raw": "```python\nfrom mlx_micrograd.engine import Value\n\na = Value(-4.0)\nb = Value(2.0)\nc = a + b\nd = a * b + b**3\nc += c + 1\nc += 1 + c + (-a)\nd += d * 2 + (b + a).relu()\nd += 3 * d + (b - a).relu()\ne = c - d\nf = e**2\ng = f / 2.0\ng += 10.0 / f\nprint(f'{g.data}') # prints array(24.7041, dtype=float32), the outcome of this forward pass\ng.backward()\nprint(f'{a.grad}') # prints array(138.834, dtype=float32), i.e. the numerical value of dg/da\nprint(f'{b.grad}') # prints array(645.577, dtype=float32), i.e. the numerical value of dg/db\n```",
"href": null,
"resource": null,
"url": null,
"code": "from mlx_micrograd.engine import Value\n\na = Value(-4.0)\nb = Value(2.0)\nc = a + b\nd = a * b + b**3\nc += c + 1\nc += 1 + c + (-a)\nd += d * 2 + (b + a).relu()\nd += 3 * d + (b - a).relu()\ne = c - d\nf = e**2\ng = f / 2.0\ng += 10.0 / f\nprint(f'{g.data}') # prints array(24.7041, dtype=float32), the outcome of this forward pass\ng.backward()\nprint(f'{a.grad}') # prints array(138.834, dtype=float32), i.e. the numerical value of dg/da\nprint(f'{b.grad}') # prints array(645.577, dtype=float32), i.e. the numerical value of dg/db",
"user": null,
"label": null,
"lang": "python"
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | mlx_micrograd - mlx port of Karpathy's micrograd- a tiny scalar-valued autograd engine with a small PyTorch-like neural network library on top.
https://github.com/Jaykef/mlx_micrograd
Installation
```bash
pip install mlx_micrograd
```
Example usage
Example showing a number of possible supported operations:
```python
from mlx_micrograd.engine import Value
a = Value(-4.0)
b = Value(2.0)
c = a + b
d = a * b + b**3
c += c + 1
c += 1 + c + (-a)
d += d * 2 + (b + a).relu()
d += 3 * d + (b - a).relu()
e = c - d
f = e**2
g = f / 2.0
g += 10.0 / f
print(f'{g.data}') # prints array(24.7041, dtype=float32), the outcome of this forward pass
g.backward()
print(f'{a.grad}') # prints array(138.834, dtype=float32), i.e. the numerical value of dg/da
print(f'{b.grad}') # prints array(645.577, dtype=float32), i.e. the numerical value of dg/db
```
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/9geq5sD5zYWsGyrMINfkk.mp4"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/UQDo2hPR-kh7dUTy6pCFk.png"
}
] | [] | [
{
"reaction": "👍",
"users": [
"akashicmarga",
"ShahabVFX",
"ahmedzx"
],
"count": 3
}
] | 2024-05-05T16:09:11.000Z | 2024-05-05T16:50:28.798Z | [] | /posts/Jaward/833340983098067 | 1,792 | 0 |
618185008303784 | [
{
"type": "text",
"value": "Create and Train Your Own Expert LLM: Generating Synthetic, Fact-Based Datasets with LMStudio/Ollama and then fine-tuning with MLX and Unsloth",
"raw": "Create and Train Your Own Expert LLM: Generating Synthetic, Fact-Based Datasets with LMStudio/Ollama and then fine-tuning with MLX and Unsloth",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Hey everyone!",
"raw": "Hey everyone!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I know there are tons of videos and tutorials out there already but I've noticed a lot of questions popping up in community posts about using synthetic datasets for creative projects and how to transform personal content into more factual material. In my own work doing enterprise-level SFT and crafting my open-source models, I've enhanced a Python framework originally shared by the creator of the Tess models. This improved stack utilizes local language models and also integrates the Wikipedia dataset to ensure that the content generated is as accurate and reliable as possible.",
"raw": "I know there are tons of videos and tutorials out there already but I've noticed a lot of questions popping up in community posts about using synthetic datasets for creative projects and how to transform personal content into more factual material. In my own work doing enterprise-level SFT and crafting my open-source models, I've enhanced a Python framework originally shared by the creator of the Tess models. This improved stack utilizes local language models and also integrates the Wikipedia dataset to ensure that the content generated is as accurate and reliable as possible.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I've been thinking of putting together a comprehensive, step-by-step course/guide on creating your own Expert Language Model. From dataset preparation and training to deployment on Hugging Face and even using something like AnythingLLM for user interaction. I'll walk you through each phase, clarifying complex concepts and troubleshooting common pitfalls.",
"raw": "I've been thinking of putting together a comprehensive, step-by-step course/guide on creating your own Expert Language Model. From dataset preparation and training to deployment on Hugging Face and even using something like AnythingLLM for user interaction. I'll walk you through each phase, clarifying complex concepts and troubleshooting common pitfalls.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Let me know if this interests you!",
"raw": "Let me know if this interests you!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Most of the datasets and models I've made have been using these scripts and my approach",
"raw": "Most of the datasets and models I've made have been using these scripts and my approach",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Create and Train Your Own Expert LLM: Generating Synthetic, Fact-Based Datasets with LMStudio/Ollama and then fine-tuning with MLX and Unsloth
Hey everyone!
I know there are tons of videos and tutorials out there already but I've noticed a lot of questions popping up in community posts about using synthetic datasets for creative projects and how to transform personal content into more factual material. In my own work doing enterprise-level SFT and crafting my open-source models, I've enhanced a Python framework originally shared by the creator of the Tess models. This improved stack utilizes local language models and also integrates the Wikipedia dataset to ensure that the content generated is as accurate and reliable as possible.
I've been thinking of putting together a comprehensive, step-by-step course/guide on creating your own Expert Language Model. From dataset preparation and training to deployment on Hugging Face and even using something like AnythingLLM for user interaction. I'll walk you through each phase, clarifying complex concepts and troubleshooting common pitfalls.
Let me know if this interests you!
Most of the datasets and models I've made have been using these scripts and my approach | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64740cf7485a7c8e1bd51ac9/CXZCJm2x4ToT83pEIYyQR.png",
"fullname": "Beckett Dillon",
"name": "Severian",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 175,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"edumunozsala",
"SmartGoldfish",
"rizwanalvi1",
"HeRksTAn",
"algorithm",
"aimillsapps",
"KingNish",
"Dlbk",
"win10",
"Stefan171",
"shivamgpt",
"Ayeleth",
"iweavings",
"Clausss",
"kid502",
"Deepankar27",
"krishnamishra94",
"Taylor658",
"victor",
"Nioi",
"ajibawa-2023",
"nivram189",
"pedi",
"ShahabVFX",
"ahmedzx",
"AtAndDev",
"alt2023",
"schlafen"
],
"count": 28
},
{
"reaction": "❤️",
"users": [
"afkfatih",
"algorithm",
"MultiTrickFox",
"raincandy-u",
"victor",
"nivram189",
"pedi",
"LeroyDyer",
"ahmedzx",
"AtAndDev",
"GaelicThunder"
],
"count": 11
},
{
"reaction": "🚀",
"users": [
"HeRksTAn",
"algorithm",
"victor",
"nivram189",
"pedi",
"AtAndDev",
"Norod78"
],
"count": 7
},
{
"reaction": "🔥",
"users": [
"algorithm",
"victor",
"nivram189",
"pedi",
"AtAndDev"
],
"count": 5
},
{
"reaction": "😎",
"users": [
"algorithm",
"victor",
"nivram189",
"AtAndDev"
],
"count": 4
}
] | 2024-05-04T15:25:22.000Z | 2024-05-08T04:17:19.518Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/642d678777078db98b729188/lYhIEChF4qQG8ltRF3ECw.png",
"fullname": "algorithm",
"name": "algorithm",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62ec82aaaefff8bcb1336b87/1_-oJrfuFTSiHnYv1eBPr.jpeg",
"fullname": "Malich Coory",
"name": "IndrasMirror",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1678188568629-noauth.png",
"fullname": "葉佐俊",
"name": "win10",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 25,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg",
"fullname": "leroy Samuel Dyer",
"name": "LeroyDyer",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 84,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/646d811eeb9268aeebc4bfeb/lbGHopd87wkB-TL1rCZHR.jpeg",
"fullname": "iweavings",
"name": "iweavings",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/657eb5b256c9c67605a6e8b5/RPblnGJX57oiIcASEz_S8.png",
"fullname": "raincandy_U",
"name": "raincandy-u",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 30,
"isFollowing": false
},
{
"avatarUrl": "/avatars/dbd21f8d0624422f232aa0d788e6838b.svg",
"fullname": "yy",
"name": "capti0n",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "/avatars/09d248a6f220819763c810a73a324e8c.svg",
"fullname": "rshee",
"name": "pedi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "/avatars/5a1845b56a7489e029b200d92d7958be.svg",
"fullname": "Ahmed Moawad ",
"name": "ahmedzx",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/630f3e4002ce39336c411048/FXJON7b-aRUiH0_V2uRsi.jpeg",
"fullname": "alkinun",
"name": "AtAndDev",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 19,
"isFollowing": false
}
] | /posts/Severian/618185008303784 | 3,665 | 16 |
870174544714806 | [
{
"type": "text",
"value": "Midjourney Ai",
"raw": "Midjourney Ai",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Midjourney Ai | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/X0KponEOz7eS_5OlAIRi6.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/eOxHkKLmZg342g2gpYP5E.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/u830LW6ybGbZF9ux3NdNj.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/sk88xoVsxA3LxSEDbQ8E1.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/uikhDPksrS9PAh2m10KcT.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/PNXjOu_09ZOgDvLe1gXEq.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"Rohitkhatri75436",
"ShahabVFX"
],
"count": 2
},
{
"reaction": "👍",
"users": [
"ssml2050"
],
"count": 1
}
] | 2024-05-04T12:53:09.000Z | 2024-05-10T11:59:19.110Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1660344552924-noauth.png",
"fullname": "Ilya Shigabeev",
"name": "frappuccino",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
}
] | /posts/phenixrhyder/870174544714806 | 3,205 | 3 |
795134066112215 | [
{
"type": "text",
"value": "Enhancing Distributed Systems with Self-Healing Nodes and Adaptive Data Sharding",
"raw": "Enhancing Distributed Systems with Self-Healing Nodes and Adaptive Data Sharding",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Paper: ",
"raw": "Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2405.00004",
"href": null,
"resource": {
"type": "paper",
"id": "2405.00004",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2405.00004",
"code": null,
"user": null,
"label": "Self-healing Nodes with Adaptive Data-Sharding (2405.00004)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The paper introduces an innovative approach to improve distributed systems by integrating self-healing nodes with adaptive data sharding. This method leverages advanced concepts like self-replication, fractal regeneration, and predictive sharding to enhance scalability, performance, fault tolerance, and adaptability.",
"raw": "The paper introduces an innovative approach to improve distributed systems by integrating self-healing nodes with adaptive data sharding. This method leverages advanced concepts like self-replication, fractal regeneration, and predictive sharding to enhance scalability, performance, fault tolerance, and adaptability.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Key Concepts:",
"raw": "Key Concepts:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Self-Replication: Nodes can create copies of themselves or their data to aid in recovery and load balancing.",
"raw": "- Self-Replication: Nodes can create copies of themselves or their data to aid in recovery and load balancing.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Fractal Regeneration: Nodes can reconfigure and restore their functionality after partial damage, inspired by natural fractals.",
"raw": "- Fractal Regeneration: Nodes can reconfigure and restore their functionality after partial damage, inspired by natural fractals.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Predictive Sharding: Nodes can anticipate future data trends and proactively adjust data distribution to optimize performance.",
"raw": "- Predictive Sharding: Nodes can anticipate future data trends and proactively adjust data distribution to optimize performance.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Methodology:",
"raw": "Methodology:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The approach consists of four main steps:",
"raw": "The approach consists of four main steps:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Temporal data sharding based on data's temporal characteristics.",
"raw": "- Temporal data sharding based on data's temporal characteristics.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Self-replicating nodes to enhance data availability and reliability.",
"raw": "- Self-replicating nodes to enhance data availability and reliability.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Fractal regeneration for robust recovery mechanisms.",
"raw": "- Fractal regeneration for robust recovery mechanisms.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Predictive sharding using consistent hashing to anticipate and adapt to future data trends.",
"raw": "- Predictive sharding using consistent hashing to anticipate and adapt to future data trends.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Results and Analysis:",
"raw": "Results and Analysis:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Experimental evaluations show that this approach outperforms existing data sharding techniques in scalability, performance, fault tolerance, and adaptability. The use of synthetic data and workload generators created realistic scenarios for testing.",
"raw": "Experimental evaluations show that this approach outperforms existing data sharding techniques in scalability, performance, fault tolerance, and adaptability. The use of synthetic data and workload generators created realistic scenarios for testing.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Applications:",
"raw": "Applications:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The methodology can be applied to various domains such as distributed database systems, blockchain networks, IoT, and cloud computing, offering improvements in data distribution efficiency and system resilience.",
"raw": "The methodology can be applied to various domains such as distributed database systems, blockchain networks, IoT, and cloud computing, offering improvements in data distribution efficiency and system resilience.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Enhancing Distributed Systems with Self-Healing Nodes and Adaptive Data Sharding
Paper: https://huggingface.co/papers/2405.00004
The paper introduces an innovative approach to improve distributed systems by integrating self-healing nodes with adaptive data sharding. This method leverages advanced concepts like self-replication, fractal regeneration, and predictive sharding to enhance scalability, performance, fault tolerance, and adaptability.
Key Concepts:
- Self-Replication: Nodes can create copies of themselves or their data to aid in recovery and load balancing.
- Fractal Regeneration: Nodes can reconfigure and restore their functionality after partial damage, inspired by natural fractals.
- Predictive Sharding: Nodes can anticipate future data trends and proactively adjust data distribution to optimize performance.
Methodology:
The approach consists of four main steps:
- Temporal data sharding based on data's temporal characteristics.
- Self-replicating nodes to enhance data availability and reliability.
- Fractal regeneration for robust recovery mechanisms.
- Predictive sharding using consistent hashing to anticipate and adapt to future data trends.
Results and Analysis:
Experimental evaluations show that this approach outperforms existing data sharding techniques in scalability, performance, fault tolerance, and adaptability. The use of synthetic data and workload generators created realistic scenarios for testing.
Applications:
The methodology can be applied to various domains such as distributed database systems, blockchain networks, IoT, and cloud computing, offering improvements in data distribution efficiency and system resilience. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63dc683562dc193e6d45ceb3/hEjuQkt6RKxl-sUBeDxRp.png",
"fullname": "Ayush Thakur",
"name": "ayush-thakur02",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/qmmIsq2TFPG37NmerYlyg.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/ccoMoUKmE46mk-YSfLkgg.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/ZpelR9aLP8FbTwmmA0YcW.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/7d7G1l_SLhd3h8AXDxyY8.png"
}
] | [] | [
{
"reaction": "👍",
"users": [
"ssml2050",
"ayush-thakur02",
"ShahabVFX",
"samusenps",
"hiyouga"
],
"count": 5
},
{
"reaction": "👀",
"users": [
"victor"
],
"count": 1
},
{
"reaction": "❤️",
"users": [
"samusenps"
],
"count": 1
},
{
"reaction": "🔥",
"users": [
"samusenps"
],
"count": 1
}
] | 2024-05-04T07:18:49.000Z | 2024-05-04T07:18:49.848Z | [] | /posts/ayush-thakur02/795134066112215 | 2,908 | 0 |
968496263925772 | [
{
"type": "text",
"value": "# Thoughts on Neural Scaling Laws",
"raw": "# Thoughts on Neural Scaling Laws",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "When you take a zoomed-out perspective view on the success goals of neural networks, you see they all revolve around the Scaling Laws - empirical observations that performance improves with increased model size, dataset, and compute resources.",
"raw": "When you take a zoomed-out perspective view on the success goals of neural networks, you see they all revolve around the Scaling Laws - empirical observations that performance improves with increased model size, dataset, and compute resources.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The specifics of how these laws apply, vary for different modalities and architectures. This is notable in the empirical equations used to measure these laws. ",
"raw": "The specifics of how these laws apply, vary for different modalities and architectures. This is notable in the empirical equations used to measure these laws. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Yet they all heavily rely on three main factors - Data, Size and Computation. These factors themselves also have sub-dependencies - data size & quality, model size & architecture, num of GPUs & code for compute kernels respectively.",
"raw": "Yet they all heavily rely on three main factors - Data, Size and Computation. These factors themselves also have sub-dependencies - data size & quality, model size & architecture, num of GPUs & code for compute kernels respectively.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "As research in these laws progresses, we begin to see new scaling laws emerge that may apply in much different ways than usual. This is typical in recent local LLMs (Phi-3, Gemma 2B, LLMs in a flash) which shows small sized models with small rich quality data beating large models",
"raw": "As research in these laws progresses, we begin to see new scaling laws emerge that may apply in much different ways than usual. This is typical in recent local LLMs (Phi-3, Gemma 2B, LLMs in a flash) which shows small sized models with small rich quality data beating large models",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I look forward to the singularity moment - when these laws take a full round spin and meet at where it all began:)",
"raw": "I look forward to the singularity moment - when these laws take a full round spin and meet at where it all began:)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "References:",
"raw": "References:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Scaling Laws for Neural Language Models: ",
"raw": "- Scaling Laws for Neural Language Models: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/pdf/2001.08361",
"href": "https://arxiv.org/pdf/2001.08361",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Scaling Laws for Autoregressive Generative Modeling: ",
"raw": "- Scaling Laws for Autoregressive Generative Modeling: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/abs/2010.14701",
"href": "https://arxiv.org/abs/2010.14701",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- LLMs in a flash: ",
"raw": "- LLMs in a flash: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/abs/2312.11514",
"href": "https://arxiv.org/abs/2312.11514",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Phi-3 Technical Report: ",
"raw": "- Phi-3 Technical Report: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/abs/2404.14219",
"href": "https://arxiv.org/abs/2404.14219",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Gemma 2B: ",
"raw": "- Gemma 2B: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/pdf/2403.08295",
"href": "https://arxiv.org/pdf/2403.08295",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | # Thoughts on Neural Scaling Laws
When you take a zoomed-out perspective view on the success goals of neural networks, you see they all revolve around the Scaling Laws - empirical observations that performance improves with increased model size, dataset, and compute resources.
The specifics of how these laws apply, vary for different modalities and architectures. This is notable in the empirical equations used to measure these laws.
Yet they all heavily rely on three main factors - Data, Size and Computation. These factors themselves also have sub-dependencies - data size & quality, model size & architecture, num of GPUs & code for compute kernels respectively.
As research in these laws progresses, we begin to see new scaling laws emerge that may apply in much different ways than usual. This is typical in recent local LLMs (Phi-3, Gemma 2B, LLMs in a flash) which shows small sized models with small rich quality data beating large models
I look forward to the singularity moment - when these laws take a full round spin and meet at where it all began:)
References:
- Scaling Laws for Neural Language Models: https://arxiv.org/pdf/2001.08361
- Scaling Laws for Autoregressive Generative Modeling: https://arxiv.org/abs/2010.14701
- LLMs in a flash: https://arxiv.org/abs/2312.11514
- Phi-3 Technical Report: https://arxiv.org/abs/2404.14219
- Gemma 2B: https://arxiv.org/pdf/2403.08295 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/2Dp8dua75lkx8Z8Adf6EW.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/b_PjwPYIBmL2ZQPCUhHKk.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/zIdvNDx44FQVyhI0g7K1-.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/pw_HjW_LHiVIeXN9PZofU.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/3wKsQPJyTMmZ4M_o_oQpW.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/m_yPXI_RMwxGyIo7vWj_2.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/Ua-dF5X0D0xEYyewtKeAa.png"
}
] | [] | [
{
"reaction": "👍",
"users": [
"ssml2050",
"ShahabVFX",
"adorkin"
],
"count": 3
}
] | 2024-05-04T04:37:46.000Z | 2024-05-05T01:11:48.948Z | [] | /posts/Jaward/968496263925772 | 2,456 | 0 |
914231688157819 | [
{
"type": "text",
"value": "The IDM-VTON (Improving Diffusion Models for Authentic Virtual Try-on in the Wild) is so powerful that it can even transfer beard or hair as well.",
"raw": "The IDM-VTON (Improving Diffusion Models for Authentic Virtual Try-on in the Wild) is so powerful that it can even transfer beard or hair as well.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I have prepared installer scripts and full tutorials for Windows (requires min 8 GB VRAM GPU), Massed Compute (I suggest this if you don’t have a strong GPU), RunPod and a free Kaggle account (works perfect as well but slow).",
"raw": "I have prepared installer scripts and full tutorials for Windows (requires min 8 GB VRAM GPU), Massed Compute (I suggest this if you don’t have a strong GPU), RunPod and a free Kaggle account (works perfect as well but slow).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Windows Tutorial : ",
"raw": "Windows Tutorial : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://youtu.be/m4pcIeAVQD0",
"href": "https://youtu.be/m4pcIeAVQD0",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Cloud (Massed Compute, RunPod & Kaggle) Tutorial : ",
"raw": "Cloud (Massed Compute, RunPod & Kaggle) Tutorial : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://youtu.be/LeHfgq_lAXU",
"href": "https://youtu.be/LeHfgq_lAXU",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | The IDM-VTON (Improving Diffusion Models for Authentic Virtual Try-on in the Wild) is so powerful that it can even transfer beard or hair as well.
I have prepared installer scripts and full tutorials for Windows (requires min 8 GB VRAM GPU), Massed Compute (I suggest this if you don’t have a strong GPU), RunPod and a free Kaggle account (works perfect as well but slow).
Windows Tutorial : https://youtu.be/m4pcIeAVQD0
Cloud (Massed Compute, RunPod & Kaggle) Tutorial : https://youtu.be/LeHfgq_lAXU
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1672531901326-6345bd89fe134dfd7a0dba40.png",
"fullname": "Furkan Gözükara",
"name": "MonsterMMORPG",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 376,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/6FAf1Qa3ScZOsDUViuqIx.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/8vn_YR30RoPaRssRTIHah.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/NvTqEBlqwCDzwvXI3qEDE.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Rvz-KVu-UPPobFv2_twyO.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/wn0XIlLuqojp_PXmYNaMY.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/fQPgfa3j5qj3F2AFvNVGk.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/q48hC5Ya925Bxc0NaD5kr.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Dfjky0p5YqSU7F2QNgJa7.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/1FM9nqcqfrohQiffAW0Qe.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Lq6qZUTbDm8NzTYGVnVSA.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/ymQ2IzhvG0rwaONN4cmVP.png"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"Jaward",
"ShahabVFX"
],
"count": 2
},
{
"reaction": "👍",
"users": [
"kevinpics"
],
"count": 1
}
] | 2024-05-03T23:00:42.000Z | 2024-05-03T23:00:42.154Z | [] | /posts/MonsterMMORPG/914231688157819 | 2,734 | 0 |
175833406902405 | [
{
"type": "text",
"value": "ChemLLM-20B SFT and DPO is coming!🤗",
"raw": "ChemLLM-20B SFT and DPO is coming!🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | ChemLLM-20B SFT and DPO is coming!🤗 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/bQFX1iFbXEBXcQvUNL811.png",
"fullname": "Di Zhang",
"name": "qq8933",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 108,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"ajibawa-2023",
"ssml2050",
"ShahabVFX",
"n4ze3m",
"dillfrescott"
],
"count": 5
}
] | 2024-05-03T19:56:12.000Z | 2024-05-05T01:41:01.774Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/bQFX1iFbXEBXcQvUNL811.png",
"fullname": "Di Zhang",
"name": "qq8933",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 108,
"isFollowing": false
}
] | /posts/qq8933/175833406902405 | 2,391 | 1 |
762050501752293 | [
{
"type": "text",
"value": "A new dataset for anyone interested in Satellite imagery: 3 million ",
"raw": "A new dataset for anyone interested in Satellite imagery: 3 million ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@Satellogic",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "Satellogic",
"label": null,
"lang": null
},
{
"type": "text",
"value": " images of unique locations — 6 million images, including location revisits — from around the world under a Creative Commons CC-BY 4.0 license.",
"raw": " images of unique locations — 6 million images, including location revisits — from around the world under a Creative Commons CC-BY 4.0 license.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Interesting potential in journalism.",
"raw": "Interesting potential in journalism.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/satellogic/EarthView",
"href": null,
"resource": {
"type": "dataset",
"id": "satellogic/EarthView",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/satellogic/EarthView",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | A new dataset for anyone interested in Satellite imagery: 3 million @Satellogic images of unique locations — 6 million images, including location revisits — from around the world under a Creative Commons CC-BY 4.0 license.
Interesting potential in journalism.
https://huggingface.co/datasets/satellogic/EarthView | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🚀",
"users": [
"Taylor658",
"ajibawa-2023",
"Ndonda871",
"qwertcz",
"pedi",
"ShahabVFX"
],
"count": 6
},
{
"reaction": "👍",
"users": [
"ssml2050",
"Ndonda871",
"qwertcz"
],
"count": 3
}
] | 2024-05-03T17:01:58.000Z | 2024-05-03T17:01:58.652Z | [] | /posts/fdaudens/762050501752293 | 2,479 | 0 |
101449295408409 | [
{
"type": "text",
"value": "Excited to bring our benchmarking leaderboard of >100 LLM API endpoints to HF! ",
"raw": "Excited to bring our benchmarking leaderboard of >100 LLM API endpoints to HF! ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Speed and price are often just as important as quality when building applications with LLMs. We bring together all the data you need to consider all three when you need to pick a model and API provider.",
"raw": "Speed and price are often just as important as quality when building applications with LLMs. We bring together all the data you need to consider all three when you need to pick a model and API provider.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Coverage:",
"raw": "Coverage:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "‣ Quality (Index of evals, MMLU, Chatbot Arena, HumanEval, MT-Bench)",
"raw": "‣ Quality (Index of evals, MMLU, Chatbot Arena, HumanEval, MT-Bench)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "‣ Throughput (tokens/s: median, P5, P25, P75, P95)",
"raw": "‣ Throughput (tokens/s: median, P5, P25, P75, P95)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "‣ Latency (TTFT: median, P5, P25, P75, P95)",
"raw": "‣ Latency (TTFT: median, P5, P25, P75, P95)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "‣ Context window",
"raw": "‣ Context window",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "‣ OpenAI library compatibility",
"raw": "‣ OpenAI library compatibility",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Link to Space: ",
"raw": "Link to Space: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/ArtificialAnalysis/LLM-Performance-Leaderboard",
"href": null,
"resource": {
"type": "space",
"id": "ArtificialAnalysis/LLM-Performance-Leaderboard",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/ArtificialAnalysis/LLM-Performance-Leaderboard",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Blog post: ",
"raw": "Blog post: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/leaderboard-artificial-analysis",
"href": "https://huggingface.co/blog/leaderboard-artificial-analysis",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Excited to bring our benchmarking leaderboard of >100 LLM API endpoints to HF!
Speed and price are often just as important as quality when building applications with LLMs. We bring together all the data you need to consider all three when you need to pick a model and API provider.
Coverage:
‣ Quality (Index of evals, MMLU, Chatbot Arena, HumanEval, MT-Bench)
‣ Throughput (tokens/s: median, P5, P25, P75, P95)
‣ Latency (TTFT: median, P5, P25, P75, P95)
‣ Context window
‣ OpenAI library compatibility
Link to Space: https://huggingface.co/spaces/ArtificialAnalysis/LLM-Performance-Leaderboard
Blog post: https://huggingface.co/blog/leaderboard-artificial-analysis | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65ff2f9fcc7a4f35567b9098/oilyj1kmz11ifVoCLrK-H.png",
"fullname": "George Cameron",
"name": "georgewritescode",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 8,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65ff2f9fcc7a4f35567b9098/dJTB2QfvXP6wlqA7tneQm.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"jeffboudier",
"victor",
"Kukedlc",
"clefourrier",
"Ijithad",
"ajibawa-2023",
"ShahabVFX",
"not-lain"
],
"count": 8
},
{
"reaction": "❤️",
"users": [
"clefourrier",
"PineappleParadiseDreams",
"not-lain"
],
"count": 3
}
] | 2024-05-03T16:40:46.000Z | 2024-05-03T16:41:19.903Z | [] | /posts/georgewritescode/101449295408409 | 2,308 | 0 |
550199376490766 | [
{
"type": "text",
"value": "Only 14 languages have DPO preference style datasets on the Hugging Face Hub (",
"raw": "Only 14 languages have DPO preference style datasets on the Hugging Face Hub (",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/spaces/DIBT/preference_data_by_language",
"href": "https://huggingface.co/spaces/DIBT/preference_data_by_language",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ") Let's improve that! How?",
"raw": ") Let's improve that! How?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The Cohere For AI Aya dataset ",
"raw": "The Cohere For AI Aya dataset ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/CohereForAI/aya_dataset",
"href": null,
"resource": {
"type": "dataset",
"id": "CohereForAI/aya_dataset",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/CohereForAI/aya_dataset",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " has human-annotated prompt-completion pairs in 71 languages. We can use this to create DPO datasets for more languages!",
"raw": " has human-annotated prompt-completion pairs in 71 languages. We can use this to create DPO datasets for more languages!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Using Aya's prompt/response pairs as a starting point we can use an LLM to generate an additional response to each prompt. We then use an LLM Judge to rank each response.",
"raw": "Using Aya's prompt/response pairs as a starting point we can use an LLM to generate an additional response to each prompt. We then use an LLM Judge to rank each response.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ In some/many languages, human responses may be better than LLM ones but we may want to check that assumption for some languages. ",
"raw": "✅ In some/many languages, human responses may be better than LLM ones but we may want to check that assumption for some languages. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀 We use Argilla's distilabel library to push data to Argilla for validation. This also allows us to determine if an LLM judge is effective for different languages.",
"raw": "🚀 We use Argilla's distilabel library to push data to Argilla for validation. This also allows us to determine if an LLM judge is effective for different languages.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "As an example of what this pipeline produces:",
"raw": "As an example of what this pipeline produces:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- ",
"raw": "- ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/datasets/DIBT/aya_dutch_dpo",
"href": "https://huggingface.co/datasets/DIBT/aya_dutch_dpo",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " a DPO style dataset for Dutch using Llama 3 as a generator/judge LM.",
"raw": " a DPO style dataset for Dutch using Llama 3 as a generator/judge LM.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- An annotation Space that anyone with a HF account can contribute to: ",
"raw": "- An annotation Space that anyone with a HF account can contribute to: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://dibt-demo-argilla-space.hf.space/dataset/924ef8a8-a447-4563-8806-0e2a668a5314/annotation-mode?page=1&status=pending",
"href": "https://dibt-demo-argilla-space.hf.space/dataset/924ef8a8-a447-4563-8806-0e2a668a5314/annotation-mode?page=1&status=pending",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "As part of Data is Better Together we want to build more DPO datasets. Join us here: ",
"raw": "As part of Data is Better Together we want to build more DPO datasets. Join us here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/data-is-better-together#4-dpoorpo-datasets-for-more-languages",
"href": "https://github.com/huggingface/data-is-better-together#4-dpoorpo-datasets-for-more-languages",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " 🤗",
"raw": " 🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Only 14 languages have DPO preference style datasets on the Hugging Face Hub (https://huggingface.co/spaces/DIBT/preference_data_by_language) Let's improve that! How?
The Cohere For AI Aya dataset https://huggingface.co/datasets/CohereForAI/aya_dataset has human-annotated prompt-completion pairs in 71 languages. We can use this to create DPO datasets for more languages!
Using Aya's prompt/response pairs as a starting point we can use an LLM to generate an additional response to each prompt. We then use an LLM Judge to rank each response.
✅ In some/many languages, human responses may be better than LLM ones but we may want to check that assumption for some languages.
🚀 We use Argilla's distilabel library to push data to Argilla for validation. This also allows us to determine if an LLM judge is effective for different languages.
As an example of what this pipeline produces:
- https://huggingface.co/datasets/DIBT/aya_dutch_dpo a DPO style dataset for Dutch using Llama 3 as a generator/judge LM.
- An annotation Space that anyone with a HF account can contribute to: https://dibt-demo-argilla-space.hf.space/dataset/924ef8a8-a447-4563-8806-0e2a668a5314/annotation-mode?page=1&status=pending
As part of Data is Better Together we want to build more DPO datasets. Join us here: https://github.com/huggingface/data-is-better-together#4-dpoorpo-datasets-for-more-languages 🤗 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg",
"fullname": "Daniel van Strien",
"name": "davanstrien",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 410,
"isFollowing": false
} | [] | [] | [
{
"reaction": "❤️",
"users": [
"librarian-bot",
"fdaudens",
"ShahabVFX",
"comarproject",
"anakin87"
],
"count": 5
},
{
"reaction": "🔥",
"users": [
"ZennyKenny",
"celsowm"
],
"count": 2
}
] | 2024-05-03T13:56:04.000Z | 2024-05-03T14:01:58.677Z | [] | /posts/davanstrien/550199376490766 | 2,254 | 0 |
125153547149389 | [
{
"type": "text",
"value": "🚀🚀🚀🚀 Introducing AutoTrain Configs! 🚀🚀🚀🚀",
"raw": "🚀🚀🚀🚀 Introducing AutoTrain Configs! 🚀🚀🚀🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Now you can train models using yaml config files! 💥 These configs are easy to understand and are not at all overwhelming. So, even a person with almost zero knowledge of machine learning can train state of the art models without writing any code. Check out example configs in the config directory of autotrain-advanced github repo and feel free to share configs by creating a pull request 🤗",
"raw": "Now you can train models using yaml config files! 💥 These configs are easy to understand and are not at all overwhelming. So, even a person with almost zero knowledge of machine learning can train state of the art models without writing any code. Check out example configs in the config directory of autotrain-advanced github repo and feel free to share configs by creating a pull request 🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Github repo: ",
"raw": "Github repo: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/autotrain-advanced",
"href": "https://github.com/huggingface/autotrain-advanced",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀🚀🚀🚀 Introducing AutoTrain Configs! 🚀🚀🚀🚀
Now you can train models using yaml config files! 💥 These configs are easy to understand and are not at all overwhelming. So, even a person with almost zero knowledge of machine learning can train state of the art models without writing any code. Check out example configs in the config directory of autotrain-advanced github repo and feel free to share configs by creating a pull request 🤗
Github repo: https://github.com/huggingface/autotrain-advanced | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5fa19f4ba13e063b8b2b5e11/nGVHdTYX2udnt-K8mqY27.jpeg",
"fullname": "Abhishek Thakur",
"name": "abhishek",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 1383,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"not-lain",
"jeffboudier",
"sbarman25",
"derek-thomas",
"ajibawa-2023",
"Stefan171",
"radames",
"imad40",
"ShahabVFX",
"ndaru12",
"ThatOneCoder"
],
"count": 11
},
{
"reaction": "🚀",
"users": [
"not-lain",
"Taylor658",
"derek-thomas",
"radames",
"imad40",
"Tonic"
],
"count": 6
}
] | 2024-05-03T12:43:17.000Z | 2024-05-20T12:31:05.062Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg",
"fullname": "Lain",
"name": "not-lain",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 941,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/653a75b19430762a5ca8674b/4wW76XiBCPAoKyVZUTNwX.jpeg",
"fullname": "ThatOneCoder",
"name": "ThatOneCoder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 7,
"isFollowing": false
}
] | /posts/abhishek/125153547149389 | 3,055 | 2 |
943666667685852 | [
{
"type": "text",
"value": "May the fourth be with you... Ai yoda #starwarsday",
"raw": "May the fourth be with you... Ai yoda #starwarsday",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | May the fourth be with you... Ai yoda #starwarsday | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/FlVpCoqQnMpA1B78DWKUO.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/qeZKm29YBJbbf2_t9pf7A.webp"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/_icQRu_psZJjznkSpDMMD.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/iLgy1fElhEuc9ZrakTN_1.jpeg"
}
] | [] | [] | 2024-05-03T11:57:56.000Z | 2024-05-03T11:58:27.380Z | [] | /posts/phenixrhyder/943666667685852 | 1,789 | 0 |
644129530281733 | [
{
"type": "text",
"value": "🔍 Today's (self-serving) pick in Interpretability & Analysis of LMs: ",
"raw": "🔍 Today's (self-serving) pick in Interpretability & Analysis of LMs: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "A Primer on the Inner Workings of Transformer-based Language Models ",
"raw": "A Primer on the Inner Workings of Transformer-based Language Models ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "by ",
"raw": "by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@javifer",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "javifer",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@gsarti",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "gsarti",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@arianna-bis",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "arianna-bis",
"label": null,
"lang": null
},
{
"type": "text",
"value": " and M. R. Costa-jussà ",
"raw": " and M. R. Costa-jussà ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "(",
"raw": "(",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@mt-upc",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "mt-upc",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", ",
"raw": ", ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@GroNLP",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "GroNLP",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", ",
"raw": ", ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@facebook",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "facebook",
"label": null,
"lang": null
},
{
"type": "text",
"value": ")",
"raw": ")",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This primer can serve as a comprehensive introduction to recent advances in interpretability for Transformer-based LMs for a technical audience, employing a unified notation to introduce network modules and present state-of-the-art interpretability methods.",
"raw": "This primer can serve as a comprehensive introduction to recent advances in interpretability for Transformer-based LMs for a technical audience, employing a unified notation to introduce network modules and present state-of-the-art interpretability methods.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Interpretability methods are presented with detailed formulations and categorized as either localizing the inputs or model components responsible for a particular prediction or decoding information stored in learned representations. Then, various insights on the role of specific model components are summarized alongside recent work using model internals to direct editing and mitigate hallucinations.",
"raw": "Interpretability methods are presented with detailed formulations and categorized as either localizing the inputs or model components responsible for a particular prediction or decoding information stored in learned representations. Then, various insights on the role of specific model components are summarized alongside recent work using model internals to direct editing and mitigate hallucinations.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Finally, the paper provides a detailed picture of the open-source interpretability tools landscape, supporting the need for open-access models to advance interpretability research.",
"raw": "Finally, the paper provides a detailed picture of the open-source interpretability tools landscape, supporting the need for open-access models to advance interpretability research.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📄 Paper: ",
"raw": "📄 Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2405.00208",
"href": null,
"resource": {
"type": "paper",
"id": "2405.00208",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2405.00208",
"code": null,
"user": null,
"label": "A Primer on the Inner Workings of Transformer-based Language Models (2405.00208)",
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔍 All daily picks: ",
"raw": "🔍 All daily picks: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-ofc-lms-65ae3339949c5675d25de2f9",
"href": "https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-ofc-lms-65ae3339949c5675d25de2f9",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🔍 Today's (self-serving) pick in Interpretability & Analysis of LMs:
A Primer on the Inner Workings of Transformer-based Language Models
by @javifer @gsarti @arianna-bis and M. R. Costa-jussà
(@mt-upc, @GroNLP, @facebook)
This primer can serve as a comprehensive introduction to recent advances in interpretability for Transformer-based LMs for a technical audience, employing a unified notation to introduce network modules and present state-of-the-art interpretability methods.
Interpretability methods are presented with detailed formulations and categorized as either localizing the inputs or model components responsible for a particular prediction or decoding information stored in learned representations. Then, various insights on the role of specific model components are summarized alongside recent work using model internals to direct editing and mitigate hallucinations.
Finally, the paper provides a detailed picture of the open-source interpretability tools landscape, supporting the need for open-access models to advance interpretability research.
📄 Paper: https://huggingface.co/papers/2405.00208
🔍 All daily picks: https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-ofc-lms-65ae3339949c5675d25de2f9 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1670231290373-5e7749883d77a72421292d07.jpeg",
"fullname": "Gabriele Sarti",
"name": "gsarti",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 205,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/8hujqZLmtbr1qTk0lUicS.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/LARdlopywRT8octNHoMz5.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/Vs9uNQDiRck8MHXwWI18c.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/gV3shR7qgjzuP3ff_pZqV.png"
}
] | [
{
"avatarUrl": "/avatars/9c91a18cdc53587422311fd13a14833e.svg",
"fullname": "Arianna Bisazza",
"name": "arianna-bis",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1670231290373-5e7749883d77a72421292d07.jpeg",
"fullname": "Gabriele Sarti",
"name": "gsarti",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 205
},
{
"avatarUrl": "/avatars/bcc94a31fab7486ca9d018245a289fb0.svg",
"fullname": "Javier Ferrando",
"name": "javifer",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5
}
] | [
{
"reaction": "🚀",
"users": [
"Taylor658",
"giux78",
"lunarflu",
"javifer",
"ShahabVFX"
],
"count": 5
},
{
"reaction": "🧠",
"users": [
"lunarflu"
],
"count": 1
}
] | 2024-05-03T09:03:55.000Z | 2024-05-03T09:03:55.666Z | [] | /posts/gsarti/644129530281733 | 2,804 | 0 |
814489174970402 | [
{
"type": "text",
"value": "The Open Medical-LLM Leaderboard is now up on HF Spaces. 🤗",
"raw": "The Open Medical-LLM Leaderboard is now up on HF Spaces. 🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/openlifescienceai/open_medical_llm_leaderboard",
"href": null,
"resource": {
"type": "space",
"id": "openlifescienceai/open_medical_llm_leaderboard",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/openlifescienceai/open_medical_llm_leaderboard",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It will be interesting to add the results of the just announced Med-Gemini model to the Leaderboard to see how it compares and if its stated 91.1% MedQA benchmark is accurate. ",
"raw": "It will be interesting to add the results of the just announced Med-Gemini model to the Leaderboard to see how it compares and if its stated 91.1% MedQA benchmark is accurate. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.18416",
"href": null,
"resource": {
"type": "paper",
"id": "2404.18416",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.18416",
"code": null,
"user": null,
"label": "Capabilities of Gemini Models in Medicine (2404.18416)",
"lang": null
}
] | The Open Medical-LLM Leaderboard is now up on HF Spaces. 🤗
https://huggingface.co/spaces/openlifescienceai/open_medical_llm_leaderboard
It will be interesting to add the results of the just announced Med-Gemini model to the Leaderboard to see how it compares and if its stated 91.1% MedQA benchmark is accurate.
https://huggingface.co/papers/2404.18416 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/GXN8mEmaq3rfITRrw7GeZ.jpeg",
"fullname": "atayloraerospace",
"name": "Taylor658",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 76,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"lunarflu",
"zaggi2",
"ShahabVFX"
],
"count": 3
},
{
"reaction": "🚀",
"users": [
"lunarflu"
],
"count": 1
},
{
"reaction": "❤️",
"users": [
"lunarflu"
],
"count": 1
}
] | 2024-05-03T03:56:33.000Z | 2024-05-06T19:20:43.134Z | [] | /posts/Taylor658/814489174970402 | 2,264 | 1 |
608661762517191 | [
{
"type": "text",
"value": "I've added new collections to the Journalists on 🤗 community, focusing on Data Visualization, Optical Character Recognition, and Multimodal Models:",
"raw": "I've added new collections to the Journalists on 🤗 community, focusing on Data Visualization, Optical Character Recognition, and Multimodal Models:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- TinyChart-3B: This model interprets data visualizations based on your prompts. It can generate the underlying data table from a chart or recreate the chart with Python code.",
"raw": "- TinyChart-3B: This model interprets data visualizations based on your prompts. It can generate the underlying data table from a chart or recreate the chart with Python code.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- PDF to OCR: Convert your PDFs to text—ideal for FOI records sent as images.",
"raw": "- PDF to OCR: Convert your PDFs to text—ideal for FOI records sent as images.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Idefics-8b: A multimodal model that allows you to ask questions about images.",
"raw": "- Idefics-8b: A multimodal model that allows you to ask questions about images.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Explore these tools here: 👉 ",
"raw": "Explore these tools here: 👉 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/JournalistsonHF",
"href": "https://huggingface.co/JournalistsonHF",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I've added new collections to the Journalists on 🤗 community, focusing on Data Visualization, Optical Character Recognition, and Multimodal Models:
- TinyChart-3B: This model interprets data visualizations based on your prompts. It can generate the underlying data table from a chart or recreate the chart with Python code.
- PDF to OCR: Convert your PDFs to text—ideal for FOI records sent as images.
- Idefics-8b: A multimodal model that allows you to ask questions about images.
Explore these tools here: 👉 https://huggingface.co/JournalistsonHF | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"Taylor658",
"damerajee",
"ucalyptus",
"JuliusSandmann",
"lunarflu",
"ShahabVFX"
],
"count": 6
},
{
"reaction": "❤️",
"users": [
"lunarflu",
"Lewdiculous"
],
"count": 2
}
] | 2024-05-03T02:04:36.000Z | 2024-05-03T02:04:36.966Z | [] | /posts/fdaudens/608661762517191 | 2,054 | 0 |
353109603007626 | [
{
"type": "text",
"value": "Introducing llama-3-neural-chat-v2.2-8b! This powerful conversational AI model builds on Meta's Llama 3, fine-tuned by Locutusque for enhanced performance in coding, math & writing. ",
"raw": "Introducing llama-3-neural-chat-v2.2-8b! This powerful conversational AI model builds on Meta's Llama 3, fine-tuned by Locutusque for enhanced performance in coding, math & writing. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/Locutusque/llama-3-neural-chat-v2.2-8B",
"href": null,
"resource": {
"type": "model",
"id": "Locutusque/llama-3-neural-chat-v2.2-8B",
"discussionNum": null
},
"url": "https://huggingface.co/Locutusque/llama-3-neural-chat-v2.2-8B",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Introducing llama-3-neural-chat-v2.2-8b! This powerful conversational AI model builds on Meta's Llama 3, fine-tuned by Locutusque for enhanced performance in coding, math & writing.
https://huggingface.co/Locutusque/llama-3-neural-chat-v2.2-8B | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg",
"fullname": "Sebastian Gabarain",
"name": "Locutusque",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 180,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"aloobun",
"Tonic",
"lunarflu",
"jlinux",
"ShahabVFX",
"mohammadaeid"
],
"count": 6
},
{
"reaction": "🚀",
"users": [
"Taylor658",
"louisbrulenaudet",
"Tonic",
"lunarflu",
"mohammadaeid"
],
"count": 5
},
{
"reaction": "🤗",
"users": [
"Tonic",
"lunarflu"
],
"count": 2
},
{
"reaction": "➕",
"users": [
"Tonic",
"lunarflu"
],
"count": 2
},
{
"reaction": "👀",
"users": [
"Tonic",
"lunarflu"
],
"count": 2
}
] | 2024-05-02T23:40:02.000Z | 2024-05-20T14:58:46.980Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6414a963be60230f2f3cf41c/FEHZJy7QyCoz4Sqe_cM9r.png",
"fullname": "dadadies",
"name": "dadadies",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg",
"fullname": "Sebastian Gabarain",
"name": "Locutusque",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 180,
"isFollowing": false
}
] | /posts/Locutusque/353109603007626 | 4,099 | 4 |
866917939278630 | [
{
"type": "text",
"value": "I just shared a blogpost on ",
"raw": "I just shared a blogpost on ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://nateraw.com",
"href": "https://nateraw.com",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " explaining the motivation + process of training ",
"raw": " explaining the motivation + process of training ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/nateraw/musicgen-songstarter-v0.2",
"href": null,
"resource": {
"type": "model",
"id": "nateraw/musicgen-songstarter-v0.2",
"discussionNum": null
},
"url": "https://huggingface.co/nateraw/musicgen-songstarter-v0.2",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " - including training details, WandB logs, hparams, and notes on previous experiments.",
"raw": " - including training details, WandB logs, hparams, and notes on previous experiments.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Check it out here ⤵️",
"raw": "Check it out here ⤵️",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://nateraw.com/posts/training_musicgen_songstarter.html",
"href": "https://nateraw.com/posts/training_musicgen_songstarter.html",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ":) still kinda a WIP so if there's anything else you want to see, let me know.",
"raw": ":) still kinda a WIP so if there's anything else you want to see, let me know.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I just shared a blogpost on https://nateraw.com explaining the motivation + process of training https://huggingface.co/nateraw/musicgen-songstarter-v0.2 - including training details, WandB logs, hparams, and notes on previous experiments.
Check it out here ⤵️
https://nateraw.com/posts/training_musicgen_songstarter.html
:) still kinda a WIP so if there's anything else you want to see, let me know.
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1594936097363-noauth.jpeg",
"fullname": "Nate Raw",
"name": "nateraw",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 161,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f10cb46636b661bdc42318b/sCM2LtscVWDbG52r2uXLi.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"rooben",
"lunarflu",
"akashicmarga",
"allandclive",
"ShahabVFX",
"tvergho",
"pbotsaris",
"multimodalart",
"aceeee"
],
"count": 9
},
{
"reaction": "❤️",
"users": [
"lunarflu",
"aceeee",
"Norod78",
"tvergho",
"multimodalart"
],
"count": 5
}
] | 2024-05-02T23:18:15.000Z | 2024-05-08T09:17:54.649Z | [
{
"avatarUrl": "/avatars/a75279f2fd9a1ba8d9b8cd4f720dc693.svg",
"fullname": "rooben",
"name": "rooben",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1594936097363-noauth.jpeg",
"fullname": "Nate Raw",
"name": "nateraw",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 161,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6470521ed9360cd9d8e54027/Q9kt3-gTCX30nI2OWFkXY.jpeg",
"fullname": "Pedro Botsaris",
"name": "pbotsaris",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
}
] | /posts/nateraw/866917939278630 | 3,574 | 3 |
461901592525067 | [
{
"type": "text",
"value": "A list of my favorite articles I've written on Hugging Face (the others are kind of mid imo):",
"raw": "A list of my favorite articles I've written on Hugging Face (the others are kind of mid imo):",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/a-new-era-of-enzyme-engineering",
"href": "https://huggingface.co/blog/AmelieSchreiber/a-new-era-of-enzyme-engineering",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/protein-optimization-and-design",
"href": "https://huggingface.co/blog/AmelieSchreiber/protein-optimization-and-design",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/rfdiffusion-potentials",
"href": "https://huggingface.co/blog/AmelieSchreiber/rfdiffusion-potentials",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/mutation-scoring",
"href": "https://huggingface.co/blog/AmelieSchreiber/mutation-scoring",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/esmbind",
"href": "https://huggingface.co/blog/AmelieSchreiber/esmbind",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/esm2-ptm",
"href": "https://huggingface.co/blog/AmelieSchreiber/esm2-ptm",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/directed-evolution-with-esm2",
"href": "https://huggingface.co/blog/AmelieSchreiber/directed-evolution-with-esm2",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/intrinsic-dimension-of-proteins",
"href": "https://huggingface.co/blog/AmelieSchreiber/intrinsic-dimension-of-proteins",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/plm-persistent-homology-msa-replacement",
"href": "https://huggingface.co/blog/AmelieSchreiber/plm-persistent-homology-msa-replacement",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/AmelieSchreiber/faster-pha",
"href": "https://huggingface.co/blog/AmelieSchreiber/faster-pha",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | A list of my favorite articles I've written on Hugging Face (the others are kind of mid imo):
https://huggingface.co/blog/AmelieSchreiber/a-new-era-of-enzyme-engineering
https://huggingface.co/blog/AmelieSchreiber/protein-optimization-and-design
https://huggingface.co/blog/AmelieSchreiber/rfdiffusion-potentials
https://huggingface.co/blog/AmelieSchreiber/mutation-scoring
https://huggingface.co/blog/AmelieSchreiber/esmbind
https://huggingface.co/blog/AmelieSchreiber/esm2-ptm
https://huggingface.co/blog/AmelieSchreiber/directed-evolution-with-esm2
https://huggingface.co/blog/AmelieSchreiber/intrinsic-dimension-of-proteins
https://huggingface.co/blog/AmelieSchreiber/plm-persistent-homology-msa-replacement
https://huggingface.co/blog/AmelieSchreiber/faster-pha | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64191ec8d459c9e7fbb0236b/7BeTgySZzmFCaVpntaYgP.jpeg",
"fullname": "Amelie Schreiber",
"name": "AmelieSchreiber",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 737,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"mmhamdy",
"Taylor658",
"hsevi",
"lunarflu",
"victor",
"vtiyyal1",
"radames",
"samusenps",
"ehottl",
"AtAndDev",
"kgourgou",
"AglWings",
"high-entropy-gio",
"gabriel-charite",
"raannakasturi"
],
"count": 15
},
{
"reaction": "❤️",
"users": [
"lunarflu",
"vtiyyal1",
"radames",
"samusenps",
"AtAndDev",
"kgourgou",
"Vlansu",
"wangjin2000",
"high-entropy-gio",
"vchupakhin",
"BeanXu",
"lighttt9999",
"bloyal"
],
"count": 13
},
{
"reaction": "😎",
"users": [
"aimraminul",
"AtAndDev"
],
"count": 2
},
{
"reaction": "👍",
"users": [
"high-entropy-gio"
],
"count": 1
}
] | 2024-05-02T22:32:28.000Z | 2024-10-18T16:33:08.723Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg",
"fullname": "Adam Molnar",
"name": "lunarflu",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 333,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/630f3e4002ce39336c411048/FXJON7b-aRUiH0_V2uRsi.jpeg",
"fullname": "alkinun",
"name": "AtAndDev",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 19,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64191ec8d459c9e7fbb0236b/7BeTgySZzmFCaVpntaYgP.jpeg",
"fullname": "Amelie Schreiber",
"name": "AmelieSchreiber",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 737,
"isFollowing": false
}
] | /posts/AmelieSchreiber/461901592525067 | 3,730 | 4 |
797255128298918 | [
{
"type": "text",
"value": "Looking for someone with +10 years of experience training Deep Kolmogorov-Arnold Networks.",
"raw": "Looking for someone with +10 years of experience training Deep Kolmogorov-Arnold Networks.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Any suggestions?",
"raw": "Any suggestions?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Looking for someone with +10 years of experience training Deep Kolmogorov-Arnold Networks.
Any suggestions? | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg",
"fullname": "Santiago Viquez",
"name": "santiviquez",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 84,
"isFollowing": false
} | [] | [] | [
{
"reaction": "😔",
"users": [
"andysalerno",
"ucalyptus",
"Ndonda871",
"Montez"
],
"count": 4
},
{
"reaction": "🤯",
"users": [
"YaTharThShaRma999",
"andrew-more"
],
"count": 2
},
{
"reaction": "🧠",
"users": [
"tomaarsen",
"shawon"
],
"count": 2
}
] | 2024-05-02T20:22:38.000Z | 2024-05-02T20:22:38.983Z | [] | /posts/santiviquez/797255128298918 | 1,569 | 0 |
318385306588047 | [
{
"type": "text",
"value": "Hello!",
"raw": "Hello!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The 8B/70B OG Llama-3 models made with the Orthogonal Activation Steering script as been pushed in private.",
"raw": "The 8B/70B OG Llama-3 models made with the Orthogonal Activation Steering script as been pushed in private.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "After multiple test with an empty prompt system, I can confirm it's not uncensored enough, but I wanted to try all the GGUF before (and it take time to do lmao)",
"raw": "After multiple test with an empty prompt system, I can confirm it's not uncensored enough, but I wanted to try all the GGUF before (and it take time to do lmao)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "If you want to try that yourself, here is the script : ",
"raw": "If you want to try that yourself, here is the script : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://gist.github.com/wassname/42aba7168bb83e278fcfea87e70fa3af",
"href": "https://gist.github.com/wassname/42aba7168bb83e278fcfea87e70fa3af",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "And here is the same script that we modified to be able to use it on multiple GPU for 70B : ",
"raw": "And here is the same script that we modified to be able to use it on multiple GPU for 70B : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://files.catbox.moe/ya4rto.ipynb",
"href": "https://files.catbox.moe/ya4rto.ipynb",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Llama3-Unholy-8B-OAS don't have the problem as it was already trained to be less censored, but the OG one was really too much censored.",
"raw": "Llama3-Unholy-8B-OAS don't have the problem as it was already trained to be less censored, but the OG one was really too much censored.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I will try to redo that soon, as it seems to HAVE WORKED for some prompt (as seen on the log, for exemple) but it's not enough.",
"raw": "I will try to redo that soon, as it seems to HAVE WORKED for some prompt (as seen on the log, for exemple) but it's not enough.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "32 entry of the dataset is clearly not enough, but it's okay, I really wanted to try that as it was something new.",
"raw": "32 entry of the dataset is clearly not enough, but it's okay, I really wanted to try that as it was something new.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I could take the Unholy way and retrain the 70B before using OAS but it should work without, that's not the goal.",
"raw": "I could take the Unholy way and retrain the 70B before using OAS but it should work without, that's not the goal.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Hello!
The 8B/70B OG Llama-3 models made with the Orthogonal Activation Steering script as been pushed in private.
After multiple test with an empty prompt system, I can confirm it's not uncensored enough, but I wanted to try all the GGUF before (and it take time to do lmao)
If you want to try that yourself, here is the script : https://gist.github.com/wassname/42aba7168bb83e278fcfea87e70fa3af
And here is the same script that we modified to be able to use it on multiple GPU for 70B : https://files.catbox.moe/ya4rto.ipynb
Llama3-Unholy-8B-OAS don't have the problem as it was already trained to be less censored, but the OG one was really too much censored.
I will try to redo that soon, as it seems to HAVE WORKED for some prompt (as seen on the log, for exemple) but it's not enough.
32 entry of the dataset is clearly not enough, but it's okay, I really wanted to try that as it was something new.
I could take the Unholy way and retrain the 70B before using OAS but it should work without, that's not the goal. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63ab1241ad514ca8d1430003/d-43TcOxG-zqAbzrH2m7H.png",
"fullname": "Undi",
"name": "Undi95",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3311,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"llama-anon",
"musicurgy",
"Kearm",
"Chief-Inspector",
"Bakanayatsu",
"ucalyptus",
"rooben",
"nschle",
"wassname",
"cosmojg",
"ClayFace",
"thomasgauthier",
"avatar9692",
"Taylor658",
"kaz666",
"ngphuchoangnam",
"vikarti-anatra"
],
"count": 17
}
] | 2024-05-02T20:18:15.000Z | 2024-10-04T19:27:28.932Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/643344526c2a26ae66d5d5b0/VotgxEYpg6YQ_eMwhMqac.jpeg",
"fullname": "dfadf",
"name": "llama-anon",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5,
"isFollowing": false
},
{
"avatarUrl": "/avatars/72660963812ee19b654fb3111cb7e5ad.svg",
"fullname": "Nicholai",
"name": "nmitchko",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 7,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/655dc641accde1bbc8b41aec/9sR2Mm7mMsyh_SpSH7ilq.jpeg",
"fullname": "Kearm",
"name": "Kearm",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 41,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63ab1241ad514ca8d1430003/d-43TcOxG-zqAbzrH2m7H.png",
"fullname": "Undi",
"name": "Undi95",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3311,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4VOzArmrRaX_DUTxGmm59.jpeg",
"fullname": "Charles McSneed",
"name": "ChuckMcSneed",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 57,
"isFollowing": false
},
{
"avatarUrl": "/avatars/9d8683646248f2e3856c4b396945d440.svg",
"fullname": "Anon",
"name": "lmg-anon",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 18,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64bb1109aaccfd28b023bcec/fumfSHv9pnW1rMvgQeibP.png",
"fullname": "Matthew Andrews",
"name": "BlueNipples",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 57,
"isFollowing": false
},
{
"avatarUrl": "/avatars/7e313aebaddb3ec752ec48b619464f2c.svg",
"fullname": "wassname",
"name": "wassname",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 9,
"isFollowing": false
},
{
"avatarUrl": "/avatars/476d1cd6acf5e1e79662bb5a4567c292.svg",
"fullname": "Eric Izoita",
"name": "nytopop",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/641dfddf3bae5a77636817c5/2IwNwh9kK98eCHUmOGoWD.png",
"fullname": "wing lian",
"name": "winglian",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 2304,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6548b80bb3a7efb9391e19e8/DYCJL22AOn8kDLQhi9TaW.png",
"fullname": "DreamGen",
"name": "DreamGenX",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 17,
"isFollowing": false
}
] | /posts/Undi95/318385306588047 | 20,592 | 61 |
480601351623058 | [
{
"type": "text",
"value": "one thousand Miku ヾ( ˃ᴗ˂ )◞ • *✰",
"raw": "one thousand Miku ヾ( ˃ᴗ˂ )◞ • *✰",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/parsee-mizuhashi/miku",
"href": null,
"resource": {
"type": "dataset",
"id": "parsee-mizuhashi/miku",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/parsee-mizuhashi/miku",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "by ",
"raw": "by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@parsee-mizuhashi",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "parsee-mizuhashi",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | one thousand Miku ヾ( ˃ᴗ˂ )◞ • *✰
https://huggingface.co/datasets/parsee-mizuhashi/miku
by @parsee-mizuhashi | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg",
"fullname": "Lain",
"name": "not-lain",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 941,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/D-ahNlg_iJz-aT-DiJJ2p.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/UQuePm2wPuCM6xT_jLusw.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/gX1zgC1JuFFrhHk7ZJne3.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/WPJAnzCDoeeuCXb4fu9Y9.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64a6a1cd35c3af3adefafe1b/qRv8U3MGFWLJGn96xGeAj.png",
"fullname": "Parsee Mizuhashi",
"name": "parsee-mizuhashi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 16
}
] | [
{
"reaction": "❤️",
"users": [
"antiven0m",
"samusenps",
"r3gm",
"EloyOn",
"Lewdiculous",
"EveryPizza"
],
"count": 6
},
{
"reaction": "🔥",
"users": [
"Lewdiculous"
],
"count": 1
},
{
"reaction": "🧠",
"users": [
"Lewdiculous"
],
"count": 1
}
] | 2024-05-02T19:28:08.000Z | 2024-05-05T23:29:01.339Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/HOCJv7WE2C4eqqqppDFpA.png",
"fullname": "Lewdiculous (AetherArchitectural)",
"name": "Lewdiculous",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 845,
"isFollowing": false
}
] | /posts/not-lain/480601351623058 | 1,628 | 1 |
491144918451547 | [
{
"type": "text",
"value": "🚀 We recently released datasets 2.19.0! 📦",
"raw": "🚀 We recently released datasets 2.19.0! 📦",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔥 What's New:",
"raw": "🔥 What's New:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Polars integration 🐻❄️",
"raw": "- Polars integration 🐻❄️",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- fsspec support for conversion to JSON, CSV, and Parquet",
"raw": "- fsspec support for conversion to JSON, CSV, and Parquet",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Mode parameter for Image feature",
"raw": "- Mode parameter for Image feature",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- CLI function to convert script-datasets to Parquet",
"raw": "- CLI function to convert script-datasets to Parquet",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Dataset.take and Dataset.skip",
"raw": "- Dataset.take and Dataset.skip",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Plus, a bunch of general improvements & bug fixes!",
"raw": "Plus, a bunch of general improvements & bug fixes!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Check out the release notes: ",
"raw": "Check out the release notes: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/datasets/releases/tag/2.19.0",
"href": "https://github.com/huggingface/datasets/releases/tag/2.19.0",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Upgrade now and power up your data workflows! 💥",
"raw": "Upgrade now and power up your data workflows! 💥",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀 We recently released datasets 2.19.0! 📦
🔥 What's New:
- Polars integration 🐻❄️
- fsspec support for conversion to JSON, CSV, and Parquet
- Mode parameter for Image feature
- CLI function to convert script-datasets to Parquet
- Dataset.take and Dataset.skip
Plus, a bunch of general improvements & bug fixes!
Check out the release notes: https://github.com/huggingface/datasets/releases/tag/2.19.0
Upgrade now and power up your data workflows! 💥 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1606406298765-noauth.jpeg",
"fullname": "Albert Villanova del Moral",
"name": "albertvillanova",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 196,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🚀",
"users": [
"mmhamdy",
"lucabaggi",
"Taylor658",
"valentimarco",
"davanstrien",
"SilentWraith",
"not-lain",
"akashicmarga",
"louisbrulenaudet",
"Molbap",
"pedi",
"ynakashima"
],
"count": 12
},
{
"reaction": "🔥",
"users": [
"mmhamdy",
"ucalyptus",
"1aurent",
"not-lain",
"Molbap"
],
"count": 5
},
{
"reaction": "❤️",
"users": [
"samusenps",
"not-lain"
],
"count": 2
}
] | 2024-05-02T19:01:54.000Z | 2024-05-02T21:11:59.360Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62a8f56308a7ea93ff1cfef4/L1nb_j5sgVf-foXgGShU4.jpeg",
"fullname": "Luca Baggi",
"name": "lucabaggi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png",
"fullname": "Tom Aarsen",
"name": "tomaarsen",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 1060,
"isFollowing": false
}
] | /posts/albertvillanova/491144918451547 | 1,660 | 2 |
545145381083272 | [
{
"type": "text",
"value": "New model and dataset! The Llama-3-IMPACTS-2x8B-64k-MLX (and upcoming GGUF) model is a cutting-edge large language model trained on the I.M.P.A.C.T.S dataset, which encompasses scenarios from biomimicry, climate change, and theoretical astrobiology. ",
"raw": "New model and dataset! The Llama-3-IMPACTS-2x8B-64k-MLX (and upcoming GGUF) model is a cutting-edge large language model trained on the I.M.P.A.C.T.S dataset, which encompasses scenarios from biomimicry, climate change, and theoretical astrobiology. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/Severian/Llama-3-IMPACTS-2x8B-64k-MLXF",
"href": "https://huggingface.co/Severian/Llama-3-IMPACTS-2x8B-64k-MLXF",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/Severian/IMPACTS",
"href": null,
"resource": {
"type": "dataset",
"id": "Severian/IMPACTS",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/Severian/IMPACTS",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | New model and dataset! The Llama-3-IMPACTS-2x8B-64k-MLX (and upcoming GGUF) model is a cutting-edge large language model trained on the I.M.P.A.C.T.S dataset, which encompasses scenarios from biomimicry, climate change, and theoretical astrobiology.
https://huggingface.co/Severian/Llama-3-IMPACTS-2x8B-64k-MLXF
https://huggingface.co/datasets/Severian/IMPACTS
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64740cf7485a7c8e1bd51ac9/CXZCJm2x4ToT83pEIYyQR.png",
"fullname": "Beckett Dillon",
"name": "Severian",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 175,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64740cf7485a7c8e1bd51ac9/Sf59qXdA1VVXvX5_3eSkD.webp"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"so1omon",
"Srikanth492",
"Ruby-NewLeaf"
],
"count": 3
},
{
"reaction": "🔥",
"users": [
"Ruby-NewLeaf"
],
"count": 1
}
] | 2024-05-02T17:10:27.000Z | 2024-05-02T22:01:33.773Z | [] | /posts/Severian/545145381083272 | 1,665 | 0 |
709423146440881 | [
{
"type": "text",
"value": "✨ Today, we're excited to share the full data processing script used in developing our Sailor models. The repo provides an end-to-end data processing pipeline for LLM training. 🚀",
"raw": "✨ Today, we're excited to share the full data processing script used in developing our Sailor models. The repo provides an end-to-end data processing pipeline for LLM training. 🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "💻Code: ",
"raw": "💻Code: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/sail-sg/sailcraft",
"href": "https://github.com/sail-sg/sailcraft",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤗Model: ",
"raw": "🤗Model: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/collections/sail/sailor-language-models-65e19a749f978976f1959825",
"href": null,
"resource": {
"type": "collection",
"id": "sail/sailor-language-models-65e19a749f978976f1959825",
"discussionNum": null
},
"url": "https://huggingface.co/collections/sail/sailor-language-models-65e19a749f978976f1959825",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📜Paper: ",
"raw": "📜Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.03608",
"href": null,
"resource": {
"type": "paper",
"id": "2404.03608",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.03608",
"code": null,
"user": null,
"label": "Sailor: Open Language Models for South-East Asia (2404.03608)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌐Homepage: ",
"raw": "🌐Homepage: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://sailorllm.github.io",
"href": "https://sailorllm.github.io",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "# Overview 🔍",
"raw": "# Overview 🔍",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The pipeline consists of 4 stages🧹:",
"raw": "The pipeline consists of 4 stages🧹:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1️⃣ Initial data cleaning",
"raw": "1️⃣ Initial data cleaning",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "2️⃣ Near deduplication",
"raw": "2️⃣ Near deduplication",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "3️⃣ Exact deduplication",
"raw": "3️⃣ Exact deduplication",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "4️⃣ Second round of data cleaning",
"raw": "4️⃣ Second round of data cleaning",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "A special focus was given to the data cleaning part of South-East Asian (SEA) languages🌍",
"raw": "A special focus was given to the data cleaning part of South-East Asian (SEA) languages🌍",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "# Use Case ✨",
"raw": "# Use Case ✨",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "With this codebase, you can clean your own dataset with:",
"raw": "With this codebase, you can clean your own dataset with:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ Get filtered data counts after each processing stage",
"raw": "✅ Get filtered data counts after each processing stage",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ Easily configure language-specific cleaning rules (we support Arabic, Bengali, Catalan, Spanish, Basque, French, Hindi, Portuguese, Urdu, and optimize for English, Indonesian, Vietnamese, Chinese, Thai, Lao, Malay)",
"raw": "✅ Easily configure language-specific cleaning rules (we support Arabic, Bengali, Catalan, Spanish, Basque, French, Hindi, Portuguese, Urdu, and optimize for English, Indonesian, Vietnamese, Chinese, Thai, Lao, Malay)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ Investigate what data was removed at each processing stage",
"raw": "✅ Investigate what data was removed at each processing stage",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "# Acknowledgement 🙏",
"raw": "# Acknowledgement 🙏",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The main credit goes to ",
"raw": "The main credit goes to ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@dreamerdeo",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "dreamerdeo",
"label": null,
"lang": null
},
{
"type": "text",
"value": " , the first author of our Sailor paper ❤️! He put in tremendous effort on the data processing pipeline, enabling the model's great performance. We believe the mini repo will be a valuable resource for researchers working on dataset curation for large language models. 🎉",
"raw": " , the first author of our Sailor paper ❤️! He put in tremendous effort on the data processing pipeline, enabling the model's great performance. We believe the mini repo will be a valuable resource for researchers working on dataset curation for large language models. 🎉",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Sharing the recipe openly aligns with our commitment to open language model development. 💪 And this repo would not have been possible without the contributions from the open community, including the BigScience data cleaning tool, the all-in-one deduplication tool by ",
"raw": "Sharing the recipe openly aligns with our commitment to open language model development. 💪 And this repo would not have been possible without the contributions from the open community, including the BigScience data cleaning tool, the all-in-one deduplication tool by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@chenghao",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "chenghao",
"label": null,
"lang": null
},
{
"type": "text",
"value": " , and the deduplication project from Google. 🧠",
"raw": " , and the deduplication project from Google. 🧠",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "# What's Next 🚀",
"raw": "# What's Next 🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Share your thoughts or leave any comments on what you'd like the Sailor models to do! We also have some exciting news coming soon, and please stay tuned. 🚄",
"raw": "Share your thoughts or leave any comments on what you'd like the Sailor models to do! We also have some exciting news coming soon, and please stay tuned. 🚄",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | ✨ Today, we're excited to share the full data processing script used in developing our Sailor models. The repo provides an end-to-end data processing pipeline for LLM training. 🚀
💻Code: https://github.com/sail-sg/sailcraft
🤗Model: https://huggingface.co/collections/sail/sailor-language-models-65e19a749f978976f1959825
📜Paper: https://huggingface.co/papers/2404.03608
🌐Homepage: https://sailorllm.github.io
# Overview 🔍
The pipeline consists of 4 stages🧹:
1️⃣ Initial data cleaning
2️⃣ Near deduplication
3️⃣ Exact deduplication
4️⃣ Second round of data cleaning
A special focus was given to the data cleaning part of South-East Asian (SEA) languages🌍
# Use Case ✨
With this codebase, you can clean your own dataset with:
✅ Get filtered data counts after each processing stage
✅ Easily configure language-specific cleaning rules (we support Arabic, Bengali, Catalan, Spanish, Basque, French, Hindi, Portuguese, Urdu, and optimize for English, Indonesian, Vietnamese, Chinese, Thai, Lao, Malay)
✅ Investigate what data was removed at each processing stage
# Acknowledgement 🙏
The main credit goes to @dreamerdeo , the first author of our Sailor paper ❤️! He put in tremendous effort on the data processing pipeline, enabling the model's great performance. We believe the mini repo will be a valuable resource for researchers working on dataset curation for large language models. 🎉
Sharing the recipe openly aligns with our commitment to open language model development. 💪 And this repo would not have been possible without the contributions from the open community, including the BigScience data cleaning tool, the all-in-one deduplication tool by @chenghao , and the deduplication project from Google. 🧠
# What's Next 🚀
Share your thoughts or leave any comments on what you'd like the Sailor models to do! We also have some exciting news coming soon, and please stay tuned. 🚄
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/612ee6a7b960e78c6d2319d4/2Hu9BaAyXbyh1vt0v1Qui.jpeg",
"fullname": "Qian Liu",
"name": "SivilTaram",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 64,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1638743128077-607a5b44489fc71534e91c0e.jpeg",
"fullname": "Chenghao Mou",
"name": "chenghao",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 17
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1676272806694-6214e4ee1e35c843d42d1f88.jpeg",
"fullname": "Longxu Dou",
"name": "dreamerdeo",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 9
}
] | [
{
"reaction": "🔥",
"users": [
"mmhamdy",
"dreamerdeo",
"ToluClassics",
"chenghao"
],
"count": 4
},
{
"reaction": "🚀",
"users": [
"mmhamdy",
"dreamerdeo",
"aloobun"
],
"count": 3
},
{
"reaction": "👍",
"users": [
"szh",
"TheMrguiller",
"Huiyu"
],
"count": 3
}
] | 2024-05-02T15:33:55.000Z | 2024-05-02T15:35:09.114Z | [] | /posts/SivilTaram/709423146440881 | 1,715 | 0 |
984220331032007 | [
{
"type": "text",
"value": "#Previous Version / Older",
"raw": "#Previous Version / Older",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📂Huggingface for Android ➡️",
"raw": "📂Huggingface for Android ➡️",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🪶Median ( Go Native ) Plugin :",
"raw": "🪶Median ( Go Native ) Plugin :",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "version 0.0.1",
"raw": "version 0.0.1",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀 ",
"raw": "🚀 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/prithivMLmods/Huggingface-Android-App",
"href": null,
"resource": {
"type": "space",
"id": "prithivMLmods/Huggingface-Android-App",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/prithivMLmods/Huggingface-Android-App",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | #Previous Version / Older
📂Huggingface for Android ➡️
🪶Median ( Go Native ) Plugin :
version 0.0.1
🚀 https://huggingface.co/spaces/prithivMLmods/Huggingface-Android-App | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/UVtVbF_3rdt0DC8xTkpL1.jpeg",
"fullname": "Prithiv Sakthi",
"name": "prithivMLmods",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 393,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ZpnwWi_zKcpiyAsGFTLPV.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"jeanflop",
"samusenps",
"Taylor658",
"ucalyptus",
"raincandy-u",
"AlekseiPravdin",
"dillfrescott",
"pedi",
"Ramikan-BR",
"prithivMLmods",
"Ngrthm"
],
"count": 11
},
{
"reaction": "🤗",
"users": [
"raincandy-u",
"pedi",
"prithivMLmods"
],
"count": 3
}
] | 2024-05-02T15:18:42.000Z | 2024-05-31T17:29:01.809Z | [] | /posts/prithivMLmods/984220331032007 | 2,321 | 0 |
197550489959526 | [
{
"type": "text",
"value": "A Careful Examination of Large Language Model Performance on Grade School Arithmetic",
"raw": "A Careful Examination of Large Language Model Performance on Grade School Arithmetic",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2405.00332",
"href": null,
"resource": {
"type": "paper",
"id": "2405.00332",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2405.00332",
"code": null,
"user": null,
"label": "A Careful Examination of Large Language Model Performance on Grade\n School Arithmetic (2405.00332)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Large language models (LLMs) have achieved impressive success on many benchmarks for mathematical reasoning. However, there is growing concern that some of this performance actually reflects dataset contamination, where data closely resembling benchmark questions leaks into the training data, instead of true reasoning ability. To investigate this claim rigorously, we commission Grade School Math 1000 (GSM1k). GSM1k is designed to mirror the style and complexity of the established GSM8k benchmark, the gold standard for measuring elementary mathematical reasoning. We ensure that the two benchmarks are comparable across important metrics such as human solve rates, number of steps in solution, answer magnitude, and more. When evaluating leading open- and closed-source LLMs on GSM1k, we observe accuracy drops of up to 13%, with several families of models (e.g., Phi and Mistral) showing evidence of systematic overfitting across almost all model sizes. At the same time, many models, especially those on the frontier, (e.g., Gemini/GPT/Claude) show minimal signs of overfitting. Further analysis suggests a positive relationship (Spearman's r^2=0.32) between a model's probability of generating an example from GSM8k and its performance gap between GSM8k and GSM1k, suggesting that many models may have partially memorized GSM8k.",
"raw": "Large language models (LLMs) have achieved impressive success on many benchmarks for mathematical reasoning. However, there is growing concern that some of this performance actually reflects dataset contamination, where data closely resembling benchmark questions leaks into the training data, instead of true reasoning ability. To investigate this claim rigorously, we commission Grade School Math 1000 (GSM1k). GSM1k is designed to mirror the style and complexity of the established GSM8k benchmark, the gold standard for measuring elementary mathematical reasoning. We ensure that the two benchmarks are comparable across important metrics such as human solve rates, number of steps in solution, answer magnitude, and more. When evaluating leading open- and closed-source LLMs on GSM1k, we observe accuracy drops of up to 13%, with several families of models (e.g., Phi and Mistral) showing evidence of systematic overfitting across almost all model sizes. At the same time, many models, especially those on the frontier, (e.g., Gemini/GPT/Claude) show minimal signs of overfitting. Further analysis suggests a positive relationship (Spearman's r^2=0.32) between a model's probability of generating an example from GSM8k and its performance gap between GSM8k and GSM1k, suggesting that many models may have partially memorized GSM8k.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | A Careful Examination of Large Language Model Performance on Grade School Arithmetic
https://huggingface.co/papers/2405.00332
Large language models (LLMs) have achieved impressive success on many benchmarks for mathematical reasoning. However, there is growing concern that some of this performance actually reflects dataset contamination, where data closely resembling benchmark questions leaks into the training data, instead of true reasoning ability. To investigate this claim rigorously, we commission Grade School Math 1000 (GSM1k). GSM1k is designed to mirror the style and complexity of the established GSM8k benchmark, the gold standard for measuring elementary mathematical reasoning. We ensure that the two benchmarks are comparable across important metrics such as human solve rates, number of steps in solution, answer magnitude, and more. When evaluating leading open- and closed-source LLMs on GSM1k, we observe accuracy drops of up to 13%, with several families of models (e.g., Phi and Mistral) showing evidence of systematic overfitting across almost all model sizes. At the same time, many models, especially those on the frontier, (e.g., Gemini/GPT/Claude) show minimal signs of overfitting. Further analysis suggests a positive relationship (Spearman's r^2=0.32) between a model's probability of generating an example from GSM8k and its performance gap between GSM8k and GSM1k, suggesting that many models may have partially memorized GSM8k. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg",
"fullname": "AK",
"name": "akhaliq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 5205,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/OwadIDQacGV5mLSqOl-sB.png"
}
] | [] | [] | 2024-05-02T14:54:39.000Z | 2024-05-02T14:54:39.387Z | [] | /posts/akhaliq/197550489959526 | 6,215 | 0 |
752957768027152 | [
{
"type": "text",
"value": "🚀 Just released version 0.23.0 of the huggingface_hub Python library!",
"raw": "🚀 Just released version 0.23.0 of the huggingface_hub Python library!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Exciting updates include:",
"raw": "Exciting updates include:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📁 Seamless download to local dir!",
"raw": "📁 Seamless download to local dir!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "💡 Grammar and Tools in InferenceClient!",
"raw": "💡 Grammar and Tools in InferenceClient!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌐 Documentation full translated to Korean!",
"raw": "🌐 Documentation full translated to Korean!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👥 User API: get likes, upvotes, nb of repos, etc.!",
"raw": "👥 User API: get likes, upvotes, nb of repos, etc.!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🧩 Better model cards and encoding for ModelHubMixin!",
"raw": "🧩 Better model cards and encoding for ModelHubMixin!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Check out the full release notes for more details: ",
"raw": "Check out the full release notes for more details: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/Wauplin/huggingface_hub/discussions/6",
"href": null,
"resource": {
"type": "space",
"id": "Wauplin/huggingface_hub",
"discussionNum": 6
},
"url": "https://huggingface.co/spaces/Wauplin/huggingface_hub/discussions/6",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " 👀",
"raw": " 👀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀 Just released version 0.23.0 of the huggingface_hub Python library!
Exciting updates include:
📁 Seamless download to local dir!
💡 Grammar and Tools in InferenceClient!
🌐 Documentation full translated to Korean!
👥 User API: get likes, upvotes, nb of repos, etc.!
🧩 Better model cards and encoding for ModelHubMixin!
Check out the full release notes for more details:
https://huggingface.co/spaces/Wauplin/huggingface_hub/discussions/6
👀 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1659336880158-6273f303f6d63a28483fde12.png",
"fullname": "Lucain Pouget",
"name": "Wauplin",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 157,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🚀",
"users": [
"severo",
"tomaarsen",
"Lewdiculous",
"julien-c",
"albertvillanova",
"mmhamdy",
"radames",
"davanstrien",
"Molbap",
"not-lain"
],
"count": 10
},
{
"reaction": "❤️",
"users": [
"afrideva",
"Molbap",
"not-lain"
],
"count": 3
}
] | 2024-05-02T13:04:58.000Z | 2024-05-02T13:33:42.129Z | [] | /posts/Wauplin/752957768027152 | 1,821 | 0 |
227454808084545 | [
{
"type": "mention",
"value": null,
"raw": "@VanshGehlot",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "VanshGehlot",
"label": null,
"lang": null
},
{
"type": "text",
"value": " could upgrade help Me with this ",
"raw": " could upgrade help Me with this ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | @VanshGehlot could upgrade help Me with this | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/662b175d6015fb0364522bf8/VRxeDpMIdIW4SGg6FRWzt.jpeg",
"fullname": "Mykiea mcafee ",
"name": "Twilight02",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/662b175d6015fb0364522bf8/CAWFAEBf3mezTMoTynfL_.jpeg"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63a7183ee27a6dbd485cff9f/AelXC9zlVFWXS30XRAdTL.png",
"fullname": "Vansh Gehlot",
"name": "VanshGehlot",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 8
}
] | [] | 2024-05-02T12:32:26.000Z | 2024-05-02T12:32:26.793Z | [] | /posts/Twilight02/227454808084545 | 1,420 | 0 |
302261957883873 | [
{
"type": "text",
"value": "Full parameter fine-tuning of the LLaMA-3 8B model using a single GTX 3090 GPU with 24GB of graphics memory? ",
"raw": "Full parameter fine-tuning of the LLaMA-3 8B model using a single GTX 3090 GPU with 24GB of graphics memory? ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Please check out our tool for fine-tuning, inferencing, and evaluating GreenBitAI's low-bit LLMs: ",
"raw": "Please check out our tool for fine-tuning, inferencing, and evaluating GreenBitAI's low-bit LLMs: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/GreenBitAI/green-bit-llm",
"href": "https://github.com/GreenBitAI/green-bit-llm",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Model Zoo:",
"raw": "Model Zoo:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/GreenBitAI",
"href": "https://huggingface.co/GreenBitAI",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Full parameter fine-tuning of the LLaMA-3 8B model using a single GTX 3090 GPU with 24GB of graphics memory?
Please check out our tool for fine-tuning, inferencing, and evaluating GreenBitAI's low-bit LLMs:
https://github.com/GreenBitAI/green-bit-llm
Model Zoo:
https://huggingface.co/GreenBitAI | {
"avatarUrl": "/avatars/8c8a0055f62bd2a12f5cc6eb9a50b7f3.svg",
"fullname": "Haojin Yang",
"name": "yanghaojin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/648317e60d94eb5528a0aa75/DaD1SXRJWG08EKVmfH5v4.gif"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"yanghaojin",
"li-ping",
"Xorg2024",
"Ksgk-fy",
"ross27",
"adwidianjaya",
"GPT007",
"apolloparty"
],
"count": 8
},
{
"reaction": "🚀",
"users": [
"yanghaojin",
"Taylor658"
],
"count": 2
},
{
"reaction": "👍",
"users": [
"themex1380",
"oliverguhr"
],
"count": 2
}
] | 2024-05-02T11:15:13.000Z | 2024-05-10T05:50:01.280Z | [
{
"avatarUrl": "/avatars/8c8a0055f62bd2a12f5cc6eb9a50b7f3.svg",
"fullname": "Haojin Yang",
"name": "yanghaojin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
},
{
"avatarUrl": "/avatars/880a59e31ca263923bdbfea1149170eb.svg",
"fullname": "Jamshidkhan",
"name": "jqodiriy",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/yanghaojin/302261957883873 | 2,108 | 3 |
592013287129154 | [
{
"type": "text",
"value": "🔥Motion Latent Consistency Model🔥",
"raw": "🔥Motion Latent Consistency Model🔥",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Introducing MotionLCM💃, controlling and generating a motion in milliseconds! ",
"raw": "Introducing MotionLCM💃, controlling and generating a motion in milliseconds! ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Huggingface Space: ",
"raw": "Huggingface Space: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/wxDai/MotionLCM",
"href": null,
"resource": {
"type": "space",
"id": "wxDai/MotionLCM",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/wxDai/MotionLCM",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Huggingface Paper: ",
"raw": "Huggingface Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.19759",
"href": null,
"resource": {
"type": "paper",
"id": "2404.19759",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.19759",
"code": null,
"user": null,
"label": "MotionLCM: Real-time Controllable Motion Generation via Latent\n Consistency Model (2404.19759)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Project page: ",
"raw": "Project page: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://dai-wenxun.github.io/MotionLCM-page/",
"href": "https://dai-wenxun.github.io/MotionLCM-page/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Paper: ",
"raw": "Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/pdf/2404.19759.pdf",
"href": "https://arxiv.org/pdf/2404.19759.pdf",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Code: ",
"raw": "Code: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/Dai-Wenxun/MotionLCM",
"href": "https://github.com/Dai-Wenxun/MotionLCM",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "video: ",
"raw": "video: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.youtube.com/watch?v=BhrGmJYaRE4",
"href": "https://www.youtube.com/watch?v=BhrGmJYaRE4",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "MotionLCM supports inference pipelines of 1-4 steps, with almost no difference in effectiveness between 1 and 4 steps. Generating approximately 200 frames of motion only takes about 30ms, which averages to approximately 6k fps per frame.",
"raw": "MotionLCM supports inference pipelines of 1-4 steps, with almost no difference in effectiveness between 1 and 4 steps. Generating approximately 200 frames of motion only takes about 30ms, which averages to approximately 6k fps per frame.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Our MotionLCM can achieve high-quality text-to-motion and precise motion control results (both sparse and dense conditions) in ∼30 ms.",
"raw": "Our MotionLCM can achieve high-quality text-to-motion and precise motion control results (both sparse and dense conditions) in ∼30 ms.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We integrated a control module into the diffusion of the latent space, named Motion ControlNet, to achieve controllable motion generation. Our control algorithm is approximately 1,000 times faster than the best-performing baseline, with comparable quality.",
"raw": "We integrated a control module into the diffusion of the latent space, named Motion ControlNet, to achieve controllable motion generation. Our control algorithm is approximately 1,000 times faster than the best-performing baseline, with comparable quality.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🔥Motion Latent Consistency Model🔥
Introducing MotionLCM💃, controlling and generating a motion in milliseconds!
Huggingface Space:
https://huggingface.co/spaces/wxDai/MotionLCM
Huggingface Paper:
https://huggingface.co/papers/2404.19759
Project page: https://dai-wenxun.github.io/MotionLCM-page/
Paper: https://arxiv.org/pdf/2404.19759.pdf
Code: https://github.com/Dai-Wenxun/MotionLCM
video: https://www.youtube.com/watch?v=BhrGmJYaRE4
MotionLCM supports inference pipelines of 1-4 steps, with almost no difference in effectiveness between 1 and 4 steps. Generating approximately 200 frames of motion only takes about 30ms, which averages to approximately 6k fps per frame.
Our MotionLCM can achieve high-quality text-to-motion and precise motion control results (both sparse and dense conditions) in ∼30 ms.
We integrated a control module into the diffusion of the latent space, named Motion ControlNet, to achieve controllable motion generation. Our control algorithm is approximately 1,000 times faster than the best-performing baseline, with comparable quality. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6165514db5ec555e8e9c203b/mBdEDqRcuzIOEf3Okq6y7.png",
"fullname": "Dai-Wenxun",
"name": "wxDai",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6165514db5ec555e8e9c203b/Wdu7CubZFLOVBZzGVFspF.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"AdinaY",
"YaTharThShaRma999",
"ross27",
"samusenps",
"Joseph717171",
"KingNish",
"victor"
],
"count": 7
},
{
"reaction": "🚀",
"users": [
"AdinaY",
"samusenps",
"Joseph717171",
"victor"
],
"count": 4
},
{
"reaction": "❤️",
"users": [
"samusenps",
"Joseph717171",
"dillfrescott"
],
"count": 3
}
] | 2024-05-02T11:01:24.000Z | 2024-05-03T03:32:45.319Z | [
{
"avatarUrl": "/avatars/c82779fdf94f80cdb5020504f83c818b.svg",
"fullname": "Yatharth Sharma",
"name": "YaTharThShaRma999",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 14,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6165514db5ec555e8e9c203b/mBdEDqRcuzIOEf3Okq6y7.png",
"fullname": "Dai-Wenxun",
"name": "wxDai",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5,
"isFollowing": false
}
] | /posts/wxDai/592013287129154 | 1,474 | 2 |
759843110601726 | [
{
"type": "text",
"value": "Custom pipelines and components in Diffusers 🎸",
"raw": "Custom pipelines and components in Diffusers 🎸",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Wanted to use customized pipelines and other components (schedulers, unets, text encoders, etc.) in Diffusers?",
"raw": "Wanted to use customized pipelines and other components (schedulers, unets, text encoders, etc.) in Diffusers?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Found it inflexible? ",
"raw": "Found it inflexible? ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Since the first dawn on earth, we have supported loading custom pipelines via a ",
"raw": "Since the first dawn on earth, we have supported loading custom pipelines via a ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`custom_pipeline`",
"href": null,
"resource": null,
"url": null,
"code": "custom_pipeline",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " argument 🌄",
"raw": " argument 🌄",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "These pipelines are inference-only, i.e., the assumption is that we're leveraging an existing checkpoint (e.g., runwayml/stable-diffusion-v1-5) and ONLY modifying the pipeline implementation. ",
"raw": "These pipelines are inference-only, i.e., the assumption is that we're leveraging an existing checkpoint (e.g., runwayml/stable-diffusion-v1-5) and ONLY modifying the pipeline implementation. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We have many cool pipelines, implemented that way. They all share the same benefits available to a ",
"raw": "We have many cool pipelines, implemented that way. They all share the same benefits available to a ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`DiffusionPipeline`",
"href": null,
"resource": null,
"url": null,
"code": "DiffusionPipeline",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ", no compromise there 🤗",
"raw": ", no compromise there 🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Check them here:",
"raw": "Check them here:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/diffusers/tree/main/examples/community",
"href": "https://github.com/huggingface/diffusers/tree/main/examples/community",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Then we might have a requirement of everything customized i.e., custom components along with a custom pipeline. Sure, that's all possible. ",
"raw": "Then we might have a requirement of everything customized i.e., custom components along with a custom pipeline. Sure, that's all possible. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "All you have to do is keep the implementations of those custom components on the Hub repository you're loading your pipeline checkpoint from.",
"raw": "All you have to do is keep the implementations of those custom components on the Hub repository you're loading your pipeline checkpoint from.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "SDXL Japanese was implemented like this 🔥",
"raw": "SDXL Japanese was implemented like this 🔥",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/stabilityai/japanese-stable-diffusion-xl",
"href": null,
"resource": {
"type": "model",
"id": "stabilityai/japanese-stable-diffusion-xl",
"discussionNum": null
},
"url": "https://huggingface.co/stabilityai/japanese-stable-diffusion-xl",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Full guide is available here ⬇️",
"raw": "Full guide is available here ⬇️",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/docs/diffusers/main/en/using-diffusers/custom_pipeline_overview",
"href": "https://huggingface.co/docs/diffusers/main/en/using-diffusers/custom_pipeline_overview",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "And, of course, these share all the benefits that come with ",
"raw": "And, of course, these share all the benefits that come with ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`DiffusionPipeline`",
"href": null,
"resource": null,
"url": null,
"code": "DiffusionPipeline",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ". ",
"raw": ". ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Custom pipelines and components in Diffusers 🎸
Wanted to use customized pipelines and other components (schedulers, unets, text encoders, etc.) in Diffusers?
Found it inflexible?
Since the first dawn on earth, we have supported loading custom pipelines via a `custom_pipeline` argument 🌄
These pipelines are inference-only, i.e., the assumption is that we're leveraging an existing checkpoint (e.g., runwayml/stable-diffusion-v1-5) and ONLY modifying the pipeline implementation.
We have many cool pipelines, implemented that way. They all share the same benefits available to a `DiffusionPipeline`, no compromise there 🤗
Check them here:
https://github.com/huggingface/diffusers/tree/main/examples/community
Then we might have a requirement of everything customized i.e., custom components along with a custom pipeline. Sure, that's all possible.
All you have to do is keep the implementations of those custom components on the Hub repository you're loading your pipeline checkpoint from.
SDXL Japanese was implemented like this 🔥
https://huggingface.co/stabilityai/japanese-stable-diffusion-xl
Full guide is available here ⬇️
https://huggingface.co/docs/diffusers/main/en/using-diffusers/custom_pipeline_overview
And, of course, these share all the benefits that come with `DiffusionPipeline`. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1649681653581-5f7fbd813e94f16a85448745.jpeg",
"fullname": "Sayak Paul",
"name": "sayakpaul",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 459,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"radames"
],
"count": 1
}
] | 2024-05-02T09:49:19.000Z | 2024-05-02T09:49:19.966Z | [] | /posts/sayakpaul/759843110601726 | 2,015 | 0 |
993950080124134 | [
{
"type": "text",
"value": "When I read the KAN paper, I see physicists casually making fun of the uncertainties in MLPs or Neural nets as a whole:",
"raw": "When I read the KAN paper, I see physicists casually making fun of the uncertainties in MLPs or Neural nets as a whole:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- \"The philosophy here is close to the mindset of physicists, who often care more about typical cases rather than worst cases\" lol this went hard on NNs",
"raw": "- \"The philosophy here is close to the mindset of physicists, who often care more about typical cases rather than worst cases\" lol this went hard on NNs",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- \"Finite grid size can approximate the function well with a residue rate independent of the dimension, hence beating curse of dimensionality!\" haha.",
"raw": "- \"Finite grid size can approximate the function well with a residue rate independent of the dimension, hence beating curse of dimensionality!\" haha.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- \"Neural scaling laws are the phenomenon where test loss decreases with more model parameters\"",
"raw": "- \"Neural scaling laws are the phenomenon where test loss decreases with more model parameters\"",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- \"Our approach, which assumes the existence of smooth Kolmogorov Arnold representations, decomposes the high-dimensional function into several 1D functions\"",
"raw": "- \"Our approach, which assumes the existence of smooth Kolmogorov Arnold representations, decomposes the high-dimensional function into several 1D functions\"",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Key Differences With MLPs:",
"raw": "Key Differences With MLPs:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Activation Functions: Unlike MLPs that use fixed activation functions at the nodes, KANs utilize learnable activation functions located on the edges between nodes.",
"raw": "- Activation Functions: Unlike MLPs that use fixed activation functions at the nodes, KANs utilize learnable activation functions located on the edges between nodes.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Weight Parameters: In KANs, traditional linear weight matrices are absent. Instead, each weight parameter is replaced by a learnable univariate function, specifically a spline. ",
"raw": "- Weight Parameters: In KANs, traditional linear weight matrices are absent. Instead, each weight parameter is replaced by a learnable univariate function, specifically a spline. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Summation Nodes: Nodes in KANs perform simple summation of incoming signals without applying non-linear transformations.",
"raw": "- Summation Nodes: Nodes in KANs perform simple summation of incoming signals without applying non-linear transformations.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Advantages Over MLPs:",
"raw": "Advantages Over MLPs:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Accuracy: achieve higher accuracy with smaller network sizes compared to larger MLPs in tasks like data fitting and solving partial differential equations (PDEs).",
"raw": "- Accuracy: achieve higher accuracy with smaller network sizes compared to larger MLPs in tasks like data fitting and solving partial differential equations (PDEs).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Interpretability: Due to their unique structure, KANs are more interpretable than MLPs. ",
"raw": "- Interpretability: Due to their unique structure, KANs are more interpretable than MLPs. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Technical Innovations:",
"raw": "Technical Innovations:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Learnable Edges: learnable functions on network edges presents a novel approach to network design, providing greater flexibility in modeling complex relationships in data.",
"raw": "- Learnable Edges: learnable functions on network edges presents a novel approach to network design, providing greater flexibility in modeling complex relationships in data.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- No Linear Weights: elimination of linear weights reduces the parameters, and potentially simplifies the learning process, focusing on the optimization of univariate function representations.",
"raw": "- No Linear Weights: elimination of linear weights reduces the parameters, and potentially simplifies the learning process, focusing on the optimization of univariate function representations.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Applications and Practical Use:",
"raw": "Applications and Practical Use:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Scientific Collaboration: KANs have been applied in scientific settings as tools to help discover or rediscover math",
"raw": "- Scientific Collaboration: KANs have been applied in scientific settings as tools to help discover or rediscover math",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | When I read the KAN paper, I see physicists casually making fun of the uncertainties in MLPs or Neural nets as a whole:
- "The philosophy here is close to the mindset of physicists, who often care more about typical cases rather than worst cases" lol this went hard on NNs
- "Finite grid size can approximate the function well with a residue rate independent of the dimension, hence beating curse of dimensionality!" haha.
- "Neural scaling laws are the phenomenon where test loss decreases with more model parameters"
- "Our approach, which assumes the existence of smooth Kolmogorov Arnold representations, decomposes the high-dimensional function into several 1D functions"
Key Differences With MLPs:
- Activation Functions: Unlike MLPs that use fixed activation functions at the nodes, KANs utilize learnable activation functions located on the edges between nodes.
- Weight Parameters: In KANs, traditional linear weight matrices are absent. Instead, each weight parameter is replaced by a learnable univariate function, specifically a spline.
- Summation Nodes: Nodes in KANs perform simple summation of incoming signals without applying non-linear transformations.
Advantages Over MLPs:
- Accuracy: achieve higher accuracy with smaller network sizes compared to larger MLPs in tasks like data fitting and solving partial differential equations (PDEs).
- Interpretability: Due to their unique structure, KANs are more interpretable than MLPs.
Technical Innovations:
- Learnable Edges: learnable functions on network edges presents a novel approach to network design, providing greater flexibility in modeling complex relationships in data.
- No Linear Weights: elimination of linear weights reduces the parameters, and potentially simplifies the learning process, focusing on the optimization of univariate function representations.
Applications and Practical Use:
- Scientific Collaboration: KANs have been applied in scientific settings as tools to help discover or rediscover math | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/3GA4jo0Izle8HhoQ9-SQz.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/ge_CRGWxD6Lke1uCzmMpP.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/1NpUgn58TY4vAp4O4XyCy.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/KF3cQUQeGNN839wrYzgln.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/H4DSqH-tHpvzX_lSCafw-.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/1audHld2A1x1CR8xS7Yfe.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/d_SNryxSOyXWfgmnLTwRL.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"eramax",
"damerajee",
"Benyamin369",
"Ramikan-BR"
],
"count": 4
},
{
"reaction": "❤️",
"users": [
"gsarti",
"thomwolf"
],
"count": 2
},
{
"reaction": "🚀",
"users": [
"Ramikan-BR"
],
"count": 1
}
] | 2024-05-02T06:56:54.000Z | 2024-05-02T07:09:48.006Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
}
] | /posts/Jaward/993950080124134 | 1,533 | 1 |
740425143849714 | [
{
"type": "text",
"value": "TGI v2.0.2 is out!",
"raw": "TGI v2.0.2 is out!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- New models (idefics2, phi3)",
"raw": "- New models (idefics2, phi3)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Cleaner VLM support in the openai layer",
"raw": "- Cleaner VLM support in the openai layer",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Upgraded to pytorch 2.3.0",
"raw": "- Upgraded to pytorch 2.3.0",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/text-generation-inference/releases/tag/v2.0.2",
"href": "https://github.com/huggingface/text-generation-inference/releases/tag/v2.0.2",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Kudos ",
"raw": "Kudos ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@Narsil",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "Narsil",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@olivierdehaene",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "olivierdehaene",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@drbh",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "drbh",
"label": null,
"lang": null
},
{
"type": "text",
"value": " and so many contributors!",
"raw": " and so many contributors!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | TGI v2.0.2 is out!
- New models (idefics2, phi3)
- Cleaner VLM support in the openai layer
- Upgraded to pytorch 2.3.0
https://github.com/huggingface/text-generation-inference/releases/tag/v2.0.2
Kudos @Narsil @olivierdehaene @drbh and so many contributors!
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1605114051380-noauth.jpeg",
"fullname": "Jeff Boudier",
"name": "jeffboudier",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 195,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6452d5ba3f80ad88c77b2f05/UoWKya6ClFzeXaMB_vgoy.jpeg",
"fullname": "David Holtz",
"name": "drbh",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 13
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1608285816082-5e2967b819407e3277369b95.png",
"fullname": "Nicolas Patry",
"name": "Narsil",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 101
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62a093d63e7d1dda047039fc/QGpVSKuJLwl2EsiffCYML.jpeg",
"fullname": "Olivier Dehaene",
"name": "olivierdehaene",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 78
}
] | [
{
"reaction": "🚀",
"users": [
"sosoai",
"Narsil",
"VictorSanh",
"tomaarsen",
"radames",
"Alagib",
"dawengmerak18",
"clboetticher"
],
"count": 8
},
{
"reaction": "👍",
"users": [
"dawengmerak18",
"NHLOCAL"
],
"count": 2
}
] | 2024-05-02T06:36:39.000Z | 2024-05-02T06:36:39.142Z | [] | /posts/jeffboudier/740425143849714 | 1,682 | 0 |
556933203881858 | [
{
"type": "text",
"value": "Okay, first pass over KAN: Kolmogorov–Arnold Networks, it looks very interesting!",
"raw": "Okay, first pass over KAN: Kolmogorov–Arnold Networks, it looks very interesting!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Interpretability of KAN model: ",
"raw": "Interpretability of KAN model: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "May be considered mostly as a safety issue these days, but it can also be used as a form of interaction between the user and a model, as this paper argues and I think they make a valid point here. With MLP, we only interact with the outputs, but KAN is an entirely different paradigm and I find it compelling.",
"raw": "May be considered mostly as a safety issue these days, but it can also be used as a form of interaction between the user and a model, as this paper argues and I think they make a valid point here. With MLP, we only interact with the outputs, but KAN is an entirely different paradigm and I find it compelling.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Scalability: ",
"raw": "Scalability: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "KAN shows better parameter efficiency than MLP. This likely translates also to needing less data. We're already at the point with the frontier LLMs where all the data available from the internet is used + more is made synthetically...so we kind of need something better.",
"raw": "KAN shows better parameter efficiency than MLP. This likely translates also to needing less data. We're already at the point with the frontier LLMs where all the data available from the internet is used + more is made synthetically...so we kind of need something better.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Continual learning:",
"raw": "Continual learning:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "KAN can handle new input information w/o catastrophic forgetting, which helps to keep a model up to date without relying on some database or retraining.",
"raw": "KAN can handle new input information w/o catastrophic forgetting, which helps to keep a model up to date without relying on some database or retraining.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Sequential data:",
"raw": "Sequential data:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This is probably what most people are curious about right now, and KANs are not shown to work with sequential data yet and it's unclear what the best approach might be to make it work well both in training and regarding the interpretability aspect. That said, there's a rich long history of achieving sequential data in variety of ways, so I don't think getting the ball rolling here would be too challenging. ",
"raw": "This is probably what most people are curious about right now, and KANs are not shown to work with sequential data yet and it's unclear what the best approach might be to make it work well both in training and regarding the interpretability aspect. That said, there's a rich long history of achieving sequential data in variety of ways, so I don't think getting the ball rolling here would be too challenging. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Mostly, I just love a new paradigm and I want to see more!",
"raw": "Mostly, I just love a new paradigm and I want to see more!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.19756",
"href": null,
"resource": {
"type": "paper",
"id": "2404.19756",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.19756",
"code": null,
"user": null,
"label": "KAN: Kolmogorov-Arnold Networks (2404.19756)",
"lang": null
}
] | Okay, first pass over KAN: Kolmogorov–Arnold Networks, it looks very interesting!
Interpretability of KAN model:
May be considered mostly as a safety issue these days, but it can also be used as a form of interaction between the user and a model, as this paper argues and I think they make a valid point here. With MLP, we only interact with the outputs, but KAN is an entirely different paradigm and I find it compelling.
Scalability:
KAN shows better parameter efficiency than MLP. This likely translates also to needing less data. We're already at the point with the frontier LLMs where all the data available from the internet is used + more is made synthetically...so we kind of need something better.
Continual learning:
KAN can handle new input information w/o catastrophic forgetting, which helps to keep a model up to date without relying on some database or retraining.
Sequential data:
This is probably what most people are curious about right now, and KANs are not shown to work with sequential data yet and it's unclear what the best approach might be to make it work well both in training and regarding the interpretability aspect. That said, there's a rich long history of achieving sequential data in variety of ways, so I don't think getting the ball rolling here would be too challenging.
Mostly, I just love a new paradigm and I want to see more!
https://huggingface.co/papers/2404.19756 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1626358426339-noauth.png",
"fullname": "Harrison Kinsley",
"name": "Sentdex",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3035,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60f042c1070aef1be52f7041/uxSJcVkmww67aFen9514a.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"valeriylo",
"victor",
"mmhamdy",
"radames",
"turiabu",
"jorostuff",
"KillerShoaib",
"jgitsolutions",
"dillfrescott",
"Roozbehr72",
"Adien123",
"makiisthebes",
"fendy07"
],
"count": 13
},
{
"reaction": "👍",
"users": [
"Jaward",
"sergeodesico",
"rennokki",
"sseres",
"Noomam",
"dillfrescott",
"Pwlot",
"nebiyu29"
],
"count": 8
},
{
"reaction": "🤗",
"users": [
"gospacedev",
"Noomam",
"dillfrescott"
],
"count": 3
},
{
"reaction": "👀",
"users": [
"nateraw",
"dillfrescott",
"Ananze"
],
"count": 3
},
{
"reaction": "🚀",
"users": [
"aboundy",
"dillfrescott",
"jtrapp44"
],
"count": 3
}
] | 2024-05-01T20:24:41.000Z | 2024-07-17T11:16:31.604Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png",
"fullname": "Tom Aarsen",
"name": "tomaarsen",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 1060,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg",
"fullname": "Radamés Ajna",
"name": "radames",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2401,
"isFollowing": false
},
{
"avatarUrl": "/avatars/5d370df2645513097e9b1f5479d76e7f.svg",
"fullname": "Persian",
"name": "Persianform",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6319030647a84df2a5dd106c/30BgJnWG0sSLq9nX_JIWU.png",
"fullname": "Michael Peres",
"name": "makiisthebes",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/QffcxjI5_cEwm5akdPeum.jpeg",
"fullname": "Nikos",
"name": "nterizakis",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/Sentdex/556933203881858 | 8,252 | 5 |
514816383704022 | [
{
"type": "text",
"value": "🪽 New FluentlyXL V4!",
"raw": "🪽 New FluentlyXL V4!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤗 Finally the model came out, it turned out great, we worked on contrast, realism and anatomy.",
"raw": "🤗 Finally the model came out, it turned out great, we worked on contrast, realism and anatomy.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👨💻 Model page: ",
"raw": "👨💻 Model page: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/fluently/Fluently-XL-v4",
"href": null,
"resource": {
"type": "model",
"id": "fluently/Fluently-XL-v4",
"discussionNum": null
},
"url": "https://huggingface.co/fluently/Fluently-XL-v4",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🦾 You can try it here: ",
"raw": "🦾 You can try it here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/fluently/Fluently-Playground",
"href": null,
"resource": {
"type": "space",
"id": "fluently/Fluently-Playground",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/fluently/Fluently-Playground",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🪽 New FluentlyXL V4!
🤗 Finally the model came out, it turned out great, we worked on contrast, realism and anatomy.
👨💻 Model page: https://huggingface.co/fluently/Fluently-XL-v4
🦾 You can try it here: https://huggingface.co/spaces/fluently/Fluently-Playground | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/o-5N9QyjHgmSMk69e3O55.png",
"fullname": "Evgeniy Hristoforu",
"name": "ehristoforu",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 235,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/BxteiSqTncDSF07QXiEHX.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"ehristoforu",
"dreamdrop-art",
"lunarflu",
"victor",
"radames",
"DmitryRyumin",
"Delik",
"frankmao",
"lailailailailai"
],
"count": 9
},
{
"reaction": "🚀",
"users": [
"victor",
"ehristoforu",
"radames",
"dreamdrop-art"
],
"count": 4
}
] | 2024-05-01T16:45:28.000Z | 2024-05-01T22:00:38.569Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
}
] | /posts/ehristoforu/514816383704022 | 4,173 | 2 |
897871685021833 | [
{
"type": "text",
"value": "Can you create domain-specific synthetic datasets in under 20 minutes?",
"raw": "Can you create domain-specific synthetic datasets in under 20 minutes?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@burtenshaw",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "burtenshaw",
"label": null,
"lang": null
},
{
"type": "text",
"value": " recently launched the Domain Specific Dataset Project as part of Data is Better Together. As part of this, Ben created a Space that you can use to define some key perspectives and concepts from a domain. This seed dataset can then be used to generate a synthetic dataset for a particular domain. ",
"raw": " recently launched the Domain Specific Dataset Project as part of Data is Better Together. As part of this, Ben created a Space that you can use to define some key perspectives and concepts from a domain. This seed dataset can then be used to generate a synthetic dataset for a particular domain. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "In less than 30 minutes this afternoon, I created a domain-specific dataset focused on data-centric machine learning using these tools: ",
"raw": "In less than 30 minutes this afternoon, I created a domain-specific dataset focused on data-centric machine learning using these tools: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/davanstrien/data-centric-ml-sft",
"href": null,
"resource": {
"type": "dataset",
"id": "davanstrien/data-centric-ml-sft",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/davanstrien/data-centric-ml-sft",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can create your own domain specific datasets using this approach. Find the steps to follow here: ",
"raw": "You can create your own domain specific datasets using this approach. Find the steps to follow here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/data-is-better-together/blob/main/domain-specific-datasets/README.md",
"href": "https://github.com/huggingface/data-is-better-together/blob/main/domain-specific-datasets/README.md",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Can you create domain-specific synthetic datasets in under 20 minutes?
@burtenshaw recently launched the Domain Specific Dataset Project as part of Data is Better Together. As part of this, Ben created a Space that you can use to define some key perspectives and concepts from a domain. This seed dataset can then be used to generate a synthetic dataset for a particular domain.
In less than 30 minutes this afternoon, I created a domain-specific dataset focused on data-centric machine learning using these tools: https://huggingface.co/datasets/davanstrien/data-centric-ml-sft.
You can create your own domain specific datasets using this approach. Find the steps to follow here: https://github.com/huggingface/data-is-better-together/blob/main/domain-specific-datasets/README.md | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg",
"fullname": "Daniel van Strien",
"name": "davanstrien",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 410,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png",
"fullname": "ben burtenshaw",
"name": "burtenshaw",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 67
}
] | [
{
"reaction": "🤗",
"users": [
"alvarobartt",
"lunarflu",
"ZennyKenny",
"fdaudens",
"burtenshaw",
"tomaarsen",
"Taylor658",
"ucyang"
],
"count": 8
},
{
"reaction": "🔥",
"users": [
"KingNish",
"tomaarsen"
],
"count": 2
}
] | 2024-05-01T14:59:48.000Z | 2024-05-01T19:32:53.615Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg",
"fullname": "Adam Molnar",
"name": "lunarflu",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 333,
"isFollowing": false
}
] | /posts/davanstrien/897871685021833 | 1,535 | 1 |
721003683147092 | [
{
"type": "text",
"value": "If you are a debate fan or did this as an extracurricular activity as a kid, you might have fun with this demo - debate bot. Debate against AI/RAG:",
"raw": "If you are a debate fan or did this as an extracurricular activity as a kid, you might have fun with this demo - debate bot. Debate against AI/RAG:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/vectara/debate-bot",
"href": null,
"resource": {
"type": "space",
"id": "vectara/debate-bot",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/vectara/debate-bot",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | If you are a debate fan or did this as an extracurricular activity as a kid, you might have fun with this demo - debate bot. Debate against AI/RAG:
https://huggingface.co/spaces/vectara/debate-bot | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/631fcd70c1a8269da399c3ae/qDNZFvD8zTgh5VkR9W6BD.jpeg",
"fullname": "Ofer Mendelevitch",
"name": "ofermend",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 11,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"lunarflu",
"kelork"
],
"count": 2
},
{
"reaction": "🧠",
"users": [
"lunarflu"
],
"count": 1
},
{
"reaction": "❤️",
"users": [
"lunarflu"
],
"count": 1
},
{
"reaction": "👀",
"users": [
"ZyanLeyy"
],
"count": 1
}
] | 2024-05-01T14:36:44.000Z | 2024-05-07T04:27:41.896Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/631fcd70c1a8269da399c3ae/qDNZFvD8zTgh5VkR9W6BD.jpeg",
"fullname": "Ofer Mendelevitch",
"name": "ofermend",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 11,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg",
"fullname": "Adam Molnar",
"name": "lunarflu",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 333,
"isFollowing": false
},
{
"avatarUrl": "/avatars/a12d9ab95ad9eca1278ca61a1376a867.svg",
"fullname": "berat",
"name": "kelork",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
}
] | /posts/ofermend/721003683147092 | 1,750 | 4 |
595468300497777 | [
{
"type": "text",
"value": "New conversation in our Journalists on Hugging Face community: Exploring auto-tagging articles for taxonomy.",
"raw": "New conversation in our Journalists on Hugging Face community: Exploring auto-tagging articles for taxonomy.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I've shared insights from my previous experience with fine-tuning a model for a classification task. But has anyone built a similar use case? Or are you seeking a solution for this task too? Join the discussion here: ",
"raw": "I've shared insights from my previous experience with fine-tuning a model for a classification task. But has anyone built a similar use case? Or are you seeking a solution for this task too? Join the discussion here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/JournalistsonHF/README/discussions/2#6631b9a250c203b5a925d2e1",
"href": null,
"resource": {
"type": "space",
"id": "JournalistsonHF/README",
"discussionNum": 2
},
"url": "https://huggingface.co/spaces/JournalistsonHF/README/discussions/2#6631b9a250c203b5a925d2e1",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | New conversation in our Journalists on Hugging Face community: Exploring auto-tagging articles for taxonomy.
I've shared insights from my previous experience with fine-tuning a model for a classification task. But has anyone built a similar use case? Or are you seeking a solution for this task too? Join the discussion here: https://huggingface.co/spaces/JournalistsonHF/README/discussions/2#6631b9a250c203b5a925d2e1 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [] | [] | [] | 2024-05-01T14:31:17.000Z | 2024-05-01T14:31:17.070Z | [] | /posts/fdaudens/595468300497777 | 1,526 | 0 |
246618337818506 | [
{
"type": "text",
"value": "Octopus v4",
"raw": "Octopus v4",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Graph of language models",
"raw": "Graph of language models",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.19296",
"href": null,
"resource": {
"type": "paper",
"id": "2404.19296",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.19296",
"code": null,
"user": null,
"label": "Octopus v4: Graph of language models (2404.19296)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Language models have been effective in a wide range of applications, yet the most sophisticated models are often proprietary. For example, GPT-4 by OpenAI and various models by Anthropic are expensive and consume substantial energy. In contrast, the open-source community has produced competitive models, like Llama3. Furthermore, niche-specific smaller language models, such as those tailored for legal, medical or financial tasks, have outperformed their proprietary counterparts. This paper introduces a novel approach that employs functional tokens to integrate multiple open-source models, each optimized for particular tasks. Our newly developed Octopus v4 model leverages functional tokens to intelligently direct user queries to the most appropriate vertical model and reformat the query to achieve the best performance. Octopus v4, an evolution of the Octopus v1, v2, and v3 models, excels in selection and parameter understanding and reformatting. Additionally, we explore the use of graph as a versatile data structure that effectively coordinates multiple open-source models by harnessing the capabilities of the Octopus model and functional tokens. ",
"raw": "Language models have been effective in a wide range of applications, yet the most sophisticated models are often proprietary. For example, GPT-4 by OpenAI and various models by Anthropic are expensive and consume substantial energy. In contrast, the open-source community has produced competitive models, like Llama3. Furthermore, niche-specific smaller language models, such as those tailored for legal, medical or financial tasks, have outperformed their proprietary counterparts. This paper introduces a novel approach that employs functional tokens to integrate multiple open-source models, each optimized for particular tasks. Our newly developed Octopus v4 model leverages functional tokens to intelligently direct user queries to the most appropriate vertical model and reformat the query to achieve the best performance. Octopus v4, an evolution of the Octopus v1, v2, and v3 models, excels in selection and parameter understanding and reformatting. Additionally, we explore the use of graph as a versatile data structure that effectively coordinates multiple open-source models by harnessing the capabilities of the Octopus model and functional tokens. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Octopus v4
Graph of language models
https://huggingface.co/papers/2404.19296
Language models have been effective in a wide range of applications, yet the most sophisticated models are often proprietary. For example, GPT-4 by OpenAI and various models by Anthropic are expensive and consume substantial energy. In contrast, the open-source community has produced competitive models, like Llama3. Furthermore, niche-specific smaller language models, such as those tailored for legal, medical or financial tasks, have outperformed their proprietary counterparts. This paper introduces a novel approach that employs functional tokens to integrate multiple open-source models, each optimized for particular tasks. Our newly developed Octopus v4 model leverages functional tokens to intelligently direct user queries to the most appropriate vertical model and reformat the query to achieve the best performance. Octopus v4, an evolution of the Octopus v1, v2, and v3 models, excels in selection and parameter understanding and reformatting. Additionally, we explore the use of graph as a versatile data structure that effectively coordinates multiple open-source models by harnessing the capabilities of the Octopus model and functional tokens.
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg",
"fullname": "AK",
"name": "akhaliq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 5205,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/vmqLvddBzC50laUy6Aomw.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"martineden"
],
"count": 1
}
] | 2024-05-01T14:25:06.000Z | 2024-05-01T14:25:51.940Z | [] | /posts/akhaliq/246618337818506 | 4,714 | 0 |
352663514686700 | [
{
"type": "text",
"value": "Hello Community,",
"raw": "Hello Community,",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Would you like to see Illusion Diffusion in Video format. ",
"raw": "Would you like to see Illusion Diffusion in Video format. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/AP123/IllusionDiffusion",
"href": null,
"resource": {
"type": "space",
"id": "AP123/IllusionDiffusion",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/AP123/IllusionDiffusion",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Let me Know.",
"raw": "Let me Know.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Hello Community,
Would you like to see Illusion Diffusion in Video format. https://huggingface.co/spaces/AP123/IllusionDiffusion
Let me Know. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
} | [] | [] | [
{
"reaction": "➕",
"users": [
"nijatzeynalov",
"ijohn07",
"victor",
"Nick088",
"SvCy",
"NHLOCAL",
"fffiloni",
"Aldyytzyy",
"maxvirusvj"
],
"count": 9
}
] | 2024-05-01T12:32:04.000Z | 2024-05-11T04:08:25.894Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64024bb5cad36eb2b0fa8042/wFgZ0Tq29nJmAtwW7JfPf.png",
"fullname": "EP",
"name": "EveryPizza",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/LarTa1EMmhoy8D_GRXv8q.jpeg",
"fullname": "Aldyytzyy",
"name": "Aldyytzyy",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
}
] | /posts/KingNish/352663514686700 | 2,816 | 6 |
968392040222782 | [
{
"type": "text",
"value": "Leonardo Ai",
"raw": "Leonardo Ai",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Leonardo Ai | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/Ge7xPb2U0HIB9pCM7vAWQ.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/wafHhUL4-YSYG0FTUCVPF.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"Chunte",
"not-lain",
"dillfrescott",
"Joseph717171"
],
"count": 4
},
{
"reaction": "👀",
"users": [
"dillfrescott",
"Joseph717171"
],
"count": 2
}
] | 2024-05-01T11:10:07.000Z | 2024-05-01T11:10:07.215Z | [] | /posts/phenixrhyder/968392040222782 | 1,441 | 0 |
777238948397674 | [
{
"type": "text",
"value": "Dear all,",
"raw": "Dear all,",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We are happy to share that we have just open-sourced over 200 low-bit LLMs. For the MLX community, we have prepared 2-4 bit versions of mainstream LLMs. You can visit the following collection to access them: ",
"raw": "We are happy to share that we have just open-sourced over 200 low-bit LLMs. For the MLX community, we have prepared 2-4 bit versions of mainstream LLMs. You can visit the following collection to access them: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/collections/GreenBitAI/greenbitai-mlx-llm-6614eb6ceb8da657c2b4ed58",
"href": null,
"resource": {
"type": "collection",
"id": "GreenBitAI/greenbitai-mlx-llm-6614eb6ceb8da657c2b4ed58",
"discussionNum": null
},
"url": "https://huggingface.co/collections/GreenBitAI/greenbitai-mlx-llm-6614eb6ceb8da657c2b4ed58",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ". ",
"raw": ". ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "These low-bit models can be conveniently used through our open-source tool at ",
"raw": "These low-bit models can be conveniently used through our open-source tool at ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/GreenBitAI/gbx-lm",
"href": "https://github.com/GreenBitAI/gbx-lm",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ". ",
"raw": ". ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Compared to other open-source quantization algorithms, these models provide better accuracy retention. We have provided some model evaluation results here: ",
"raw": "Compared to other open-source quantization algorithms, these models provide better accuracy retention. We have provided some model evaluation results here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/GreenBitAI/green-bit-llm/blob/main/green_bit_llm/evaluation/README.md",
"href": "https://github.com/GreenBitAI/green-bit-llm/blob/main/green_bit_llm/evaluation/README.md",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ". ",
"raw": ". ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can also evaluate the models yourself using the evaluation script we provided.",
"raw": "You can also evaluate the models yourself using the evaluation script we provided.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Dear all,
We are happy to share that we have just open-sourced over 200 low-bit LLMs. For the MLX community, we have prepared 2-4 bit versions of mainstream LLMs. You can visit the following collection to access them: https://huggingface.co/collections/GreenBitAI/greenbitai-mlx-llm-6614eb6ceb8da657c2b4ed58.
These low-bit models can be conveniently used through our open-source tool at https://github.com/GreenBitAI/gbx-lm.
Compared to other open-source quantization algorithms, these models provide better accuracy retention. We have provided some model evaluation results here:
https://github.com/GreenBitAI/green-bit-llm/blob/main/green_bit_llm/evaluation/README.md.
You can also evaluate the models yourself using the evaluation script we provided. | {
"avatarUrl": "/avatars/8c8a0055f62bd2a12f5cc6eb9a50b7f3.svg",
"fullname": "Haojin Yang",
"name": "yanghaojin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"victor",
"yanghaojin",
"alobnayis",
"NicoNico",
"KingNish",
"adhisetiawan",
"clem"
],
"count": 8
},
{
"reaction": "👍",
"users": [
"NicoNico",
"adhisetiawan",
"clem"
],
"count": 3
}
] | 2024-05-01T11:07:39.000Z | 2024-05-03T05:14:05.838Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64b9f4569d8360edd98a9b62/HsPmCGBbgcrtj0VLVY_ex.png",
"fullname": "fhsp",
"name": "fhsp93",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
}
] | /posts/yanghaojin/777238948397674 | 1,347 | 1 |
121704956679248 | [
{
"type": "text",
"value": "two new VLM benchmarks! 🤩",
"raw": "two new VLM benchmarks! 🤩",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "BLINK: evaluates tasks that humans can solve within a blink 👀 ",
"raw": "BLINK: evaluates tasks that humans can solve within a blink 👀 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/BLINK-Benchmark/BLINK",
"href": null,
"resource": {
"type": "dataset",
"id": "BLINK-Benchmark/BLINK",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/BLINK-Benchmark/BLINK",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "SEED-2-Plus: multichoice questions on charts, maps, webs 😍 ",
"raw": "SEED-2-Plus: multichoice questions on charts, maps, webs 😍 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/AILab-CVC/SEED-Bench-2-plus",
"href": null,
"resource": {
"type": "dataset",
"id": "AILab-CVC/SEED-Bench-2-plus",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/AILab-CVC/SEED-Bench-2-plus",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | two new VLM benchmarks! 🤩
BLINK: evaluates tasks that humans can solve within a blink 👀 https://huggingface.co/datasets/BLINK-Benchmark/BLINK
SEED-2-Plus: multichoice questions on charts, maps, webs 😍 https://huggingface.co/datasets/AILab-CVC/SEED-Bench-2-plus | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1648113222875-6141a88b3a0ec78603c9e784.png",
"fullname": "Merve Noyan",
"name": "merve",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 5589,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"qubvel-hf",
"Joseph717171",
"danielus",
"adorkin",
"weirdfish23",
"Nadav314",
"Ndonda871"
],
"count": 7
},
{
"reaction": "❤️",
"users": [
"Joseph717171",
"zamanganji",
"Ndonda871",
"Masum889"
],
"count": 4
}
] | 2024-05-01T11:04:30.000Z | 2024-05-01T11:04:30.063Z | [] | /posts/merve/121704956679248 | 1,895 | 0 |
956077083567238 | [
{
"type": "text",
"value": "text-generation-inference 2.0.2 is out.",
"raw": "text-generation-inference 2.0.2 is out.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Native support for Idefics2, with much better efficiency than llava 1.6 (next) !",
"raw": "- Native support for Idefics2, with much better efficiency than llava 1.6 (next) !",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Phi3, Increase VLM support in the openai layer.",
"raw": "Phi3, Increase VLM support in the openai layer.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Release notes ",
"raw": "Release notes ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/text-generation-inference/releases/tag/v2.0.2",
"href": "https://github.com/huggingface/text-generation-inference/releases/tag/v2.0.2",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | text-generation-inference 2.0.2 is out.
- Native support for Idefics2, with much better efficiency than llava 1.6 (next) !
Phi3, Increase VLM support in the openai layer.
Release notes https://github.com/huggingface/text-generation-inference/releases/tag/v2.0.2 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1608285816082-5e2967b819407e3277369b95.png",
"fullname": "Nicolas Patry",
"name": "Narsil",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 101,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"pierric",
"aroslanov",
"AtAndDev"
],
"count": 3
}
] | 2024-05-01T09:48:40.000Z | 2024-05-01T10:51:53.346Z | [] | /posts/Narsil/956077083567238 | 1,226 | 0 |
113952082634225 | [
{
"type": "text",
"value": "🚀🎭🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟🎭🚀",
"raw": "🚀🎭🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟🎭🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📄 Title: EMOPortraits: Emotion-enhanced Multimodal One-shot Head Avatars 🔝",
"raw": "📄 Title: EMOPortraits: Emotion-enhanced Multimodal One-shot Head Avatars 🔝",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📝 Description: EMOPortraits is an enhanced multimodal one-shot head avatar model that achieves SOTA performance in emotion transfer and audio-driven facial animation tasks by improving the training pipeline and architecture to better handle intense and asymmetric facial expressions, while also proposing a novel multiview video dataset containing a wide range of such expressions.",
"raw": "📝 Description: EMOPortraits is an enhanced multimodal one-shot head avatar model that achieves SOTA performance in emotion transfer and audio-driven facial animation tasks by improving the training pipeline and architecture to better handle intense and asymmetric facial expressions, while also proposing a novel multiview video dataset containing a wide range of such expressions.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👥 Authors: Nikita Drobyshev, Antoni Bigata Casademunt, Konstantinos Vougioukas, Zoe Landgraf, Stavros Petridis, and Maja Pantic",
"raw": "👥 Authors: Nikita Drobyshev, Antoni Bigata Casademunt, Konstantinos Vougioukas, Zoe Landgraf, Stavros Petridis, and Maja Pantic",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸",
"raw": "📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📄 Paper: ",
"raw": "📄 Paper: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.19110",
"href": null,
"resource": {
"type": "paper",
"id": "2404.19110",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.19110",
"code": null,
"user": null,
"label": "EMOPortraits: Emotion-enhanced Multimodal One-shot Head Avatars (2404.19110)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌐 GitHub Page: ",
"raw": "🌐 GitHub Page: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://neeek2303.github.io/EMOPortraits",
"href": "https://neeek2303.github.io/EMOPortraits",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📚 More Papers: more cutting-edge research presented at other conferences in the ",
"raw": "📚 More Papers: more cutting-edge research presented at other conferences in the ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers",
"href": null,
"resource": {
"type": "space",
"id": "DmitryRyumin/NewEraAI-Papers",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " curated by ",
"raw": " curated by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@DmitryRyumin",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "DmitryRyumin",
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀 Added to the Avatars Collection: ",
"raw": "🚀 Added to the Avatars Collection: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36",
"href": null,
"resource": {
"type": "collection",
"id": "DmitryRyumin/avatars-65df37cdf81fec13d4dbac36",
"discussionNum": null
},
"url": "https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔍 Keywords: #EMOPortraits #EmotionalTransfer #FacialAnimation #HeadAvatar #MultimodalLearning #OneShotLearning #AsymmetricFacialExpressions #IntenseFacialExpressions #NovelDataset #CVPR2024 #DeepLearning #ComputerVision #Innovation",
"raw": "🔍 Keywords: #EMOPortraits #EmotionalTransfer #FacialAnimation #HeadAvatar #MultimodalLearning #OneShotLearning #AsymmetricFacialExpressions #IntenseFacialExpressions #NovelDataset #CVPR2024 #DeepLearning #ComputerVision #Innovation",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀🎭🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟🎭🚀
📄 Title: EMOPortraits: Emotion-enhanced Multimodal One-shot Head Avatars 🔝
📝 Description: EMOPortraits is an enhanced multimodal one-shot head avatar model that achieves SOTA performance in emotion transfer and audio-driven facial animation tasks by improving the training pipeline and architecture to better handle intense and asymmetric facial expressions, while also proposing a novel multiview video dataset containing a wide range of such expressions.
👥 Authors: Nikita Drobyshev, Antoni Bigata Casademunt, Konstantinos Vougioukas, Zoe Landgraf, Stavros Petridis, and Maja Pantic
📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸
📄 Paper: https://huggingface.co/papers/2404.19110
🌐 GitHub Page: https://neeek2303.github.io/EMOPortraits
📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin
🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36
🔍 Keywords: #EMOPortraits #EmotionalTransfer #FacialAnimation #HeadAvatar #MultimodalLearning #OneShotLearning #AsymmetricFacialExpressions #IntenseFacialExpressions #NovelDataset #CVPR2024 #DeepLearning #ComputerVision #Innovation | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg",
"fullname": "Dmitry Ryumin",
"name": "DmitryRyumin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 377,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/hjnirVMeYz2FDCatOfjsY.png"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/4W1mQ8Jjp3OiA18iNm7KP.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/PDn-YrM4honTlp9JnqV0p.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/32WwCYZv_gprUfOaDkv6g.mp4"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Zt73sHWBrXGBSeUcXN35q.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/PYZpQZYVderAx46A-1QlR.png"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/DjMbhP9p60ooHLhREKdrn.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/3vmqoYeMwULeJZXOtjqI2.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Du3BFzf5A6Dzu8JaxrPJV.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/2BEi1QVvWgv9EYlZrS2sL.mp4"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/cuT-KurUCNhPmviX2uI_4.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/6IjFFqNqgdeSoYne7gmjz.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/p4osla5vfL8krVQPrgwZ9.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/bs8e7l8YqWJsft4jVxALX.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg",
"fullname": "Dmitry Ryumin",
"name": "DmitryRyumin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 377
}
] | [
{
"reaction": "🔥",
"users": [
"DmitryRyumin",
"KingNish",
"victor",
"Delik",
"jeffboudier",
"RedSparkie",
"AlekseiPravdin",
"nroggendorff"
],
"count": 8
},
{
"reaction": "🤗",
"users": [
"denizaybey",
"fffiloni"
],
"count": 2
},
{
"reaction": "❤️",
"users": [
"AlekseiPravdin"
],
"count": 1
},
{
"reaction": "🚀",
"users": [
"sandz7"
],
"count": 1
},
{
"reaction": "👍",
"users": [
"yellowstar0311"
],
"count": 1
}
] | 2024-05-01T08:19:53.000Z | 2024-05-09T00:08:15.161Z | [
{
"avatarUrl": "/avatars/830f6914a270bef550c5469a6da96873.svg",
"fullname": "Toru Ichikawa",
"name": "spectrecoder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg",
"fullname": "Dmitry Ryumin",
"name": "DmitryRyumin",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 377,
"isFollowing": false
}
] | /posts/DmitryRyumin/113952082634225 | 1,411 | 3 |
455380547996975 | [
{
"type": "text",
"value": "Really excited to read about Kolmogorov Arnold Networks as a novel alternatives to Multi Layer Perceptrons.",
"raw": "Really excited to read about Kolmogorov Arnold Networks as a novel alternatives to Multi Layer Perceptrons.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Excerpt:",
"raw": "Excerpt:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "> Kolmogorov-Arnold Networks (KANs) are promising alternatives of Multi-Layer Perceptrons (MLPs). KANs have strong mathematical foundations just like MLPs: MLPs are based on the universal approximation theorem, while KANs are based on Kolmogorov-Arnold representation theorem. KANs and MLPs are dual: KANs have activation functions on edges, while MLPs have activation functions on nodes. This simple change makes KANs better (sometimes much better!) than MLPs in terms of both model accuracy and interpretability.",
"raw": "> Kolmogorov-Arnold Networks (KANs) are promising alternatives of Multi-Layer Perceptrons (MLPs). KANs have strong mathematical foundations just like MLPs: MLPs are based on the universal approximation theorem, while KANs are based on Kolmogorov-Arnold representation theorem. KANs and MLPs are dual: KANs have activation functions on edges, while MLPs have activation functions on nodes. This simple change makes KANs better (sometimes much better!) than MLPs in terms of both model accuracy and interpretability.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/KindXiaoming/pykan",
"href": "https://github.com/KindXiaoming/pykan",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Really excited to read about Kolmogorov Arnold Networks as a novel alternatives to Multi Layer Perceptrons.
Excerpt:
> Kolmogorov-Arnold Networks (KANs) are promising alternatives of Multi-Layer Perceptrons (MLPs). KANs have strong mathematical foundations just like MLPs: MLPs are based on the universal approximation theorem, while KANs are based on Kolmogorov-Arnold representation theorem. KANs and MLPs are dual: KANs have activation functions on edges, while MLPs have activation functions on nodes. This simple change makes KANs better (sometimes much better!) than MLPs in terms of both model accuracy and interpretability.
https://github.com/KindXiaoming/pykan | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6093a02dc4a92d63a91c5236/yUte6V0FU0BvVFAbON-9n.jpeg",
"fullname": "Diwank Tomer",
"name": "diwank",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 29,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"valeriylo",
"Hardeh",
"AtAndDev"
],
"count": 3
}
] | 2024-05-01T04:57:44.000Z | 2024-05-01T11:01:29.958Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
}
] | /posts/diwank/455380547996975 | 1,647 | 1 |
146608655577384 | [
{
"type": "text",
"value": "⌚ Visiting the past with Time Machine GPT!",
"raw": "⌚ Visiting the past with Time Machine GPT!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We are all familiar with the concept of a suite of models being a series of variants of a certain model that differ mainly in size. For example, Llama-2 7B, Llama-2 13B, Llama-2 70B",
"raw": "We are all familiar with the concept of a suite of models being a series of variants of a certain model that differ mainly in size. For example, Llama-2 7B, Llama-2 13B, Llama-2 70B",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "But this is not always the case. Researchers from The University of Oxford, The Alan Turing Institute, and The University of Manchester introduced TimeMachineGPT (TiMaGPT), a suite of language models that were pretrained on data constrained by a certain period in time. Instead of various sizes of the model, you get the same model but trained on different data coming from different times.",
"raw": "But this is not always the case. Researchers from The University of Oxford, The Alan Turing Institute, and The University of Manchester introduced TimeMachineGPT (TiMaGPT), a suite of language models that were pretrained on data constrained by a certain period in time. Instead of various sizes of the model, you get the same model but trained on different data coming from different times.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Using a GPT-2 model architecture with 117 million parameters, they trained 12 different models on Wikipedia and WMT News from 2011 to 2022 with each year represented by a model. For example, TiMaGPT-2011, TiMaGPT-2012, ..., TiMaGPT-2022.",
"raw": "Using a GPT-2 model architecture with 117 million parameters, they trained 12 different models on Wikipedia and WMT News from 2011 to 2022 with each year represented by a model. For example, TiMaGPT-2011, TiMaGPT-2012, ..., TiMaGPT-2022.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤔 But how could these models be useful?",
"raw": "🤔 But how could these models be useful?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "They can be very useful. For example:",
"raw": "They can be very useful. For example:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1️⃣ Most language models are static in the sense that they are trapped in the time bubble of their pretraining data, their knowledge is limited by the cut-off date of their training dataset. In order to update their knowledge, Temporal Adaptation can be performed, which means further training on newer data. The TiMaGPT series of models can be used to study the limitations of Temporal Adaptation of language models.",
"raw": "1️⃣ Most language models are static in the sense that they are trapped in the time bubble of their pretraining data, their knowledge is limited by the cut-off date of their training dataset. In order to update their knowledge, Temporal Adaptation can be performed, which means further training on newer data. The TiMaGPT series of models can be used to study the limitations of Temporal Adaptation of language models.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "2️⃣ Word meaning can change not only with its context but also with its time of use and there is a large amount of research that focuses on understanding how embeddings shift through time. TiMaGPT will be very helpful in studying this phenomenon.",
"raw": "2️⃣ Word meaning can change not only with its context but also with its time of use and there is a large amount of research that focuses on understanding how embeddings shift through time. TiMaGPT will be very helpful in studying this phenomenon.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "3️⃣ One more use case in the context of Time-series forecasting and event prediction is \"backtesting\". Which is using historical data to evaluate new models for forecasting the future. Models like TiMaGPT (each living in its own time without any knowledge of the future/present) will be great for such a use case.",
"raw": "3️⃣ One more use case in the context of Time-series forecasting and event prediction is \"backtesting\". Which is using historical data to evaluate new models for forecasting the future. Models like TiMaGPT (each living in its own time without any knowledge of the future/present) will be great for such a use case.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤗 All models and datasets are on the hub: ",
"raw": "🤗 All models and datasets are on the hub: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/Ti-Ma",
"href": "https://huggingface.co/Ti-Ma",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | ⌚ Visiting the past with Time Machine GPT!
We are all familiar with the concept of a suite of models being a series of variants of a certain model that differ mainly in size. For example, Llama-2 7B, Llama-2 13B, Llama-2 70B
But this is not always the case. Researchers from The University of Oxford, The Alan Turing Institute, and The University of Manchester introduced TimeMachineGPT (TiMaGPT), a suite of language models that were pretrained on data constrained by a certain period in time. Instead of various sizes of the model, you get the same model but trained on different data coming from different times.
Using a GPT-2 model architecture with 117 million parameters, they trained 12 different models on Wikipedia and WMT News from 2011 to 2022 with each year represented by a model. For example, TiMaGPT-2011, TiMaGPT-2012, ..., TiMaGPT-2022.
🤔 But how could these models be useful?
They can be very useful. For example:
1️⃣ Most language models are static in the sense that they are trapped in the time bubble of their pretraining data, their knowledge is limited by the cut-off date of their training dataset. In order to update their knowledge, Temporal Adaptation can be performed, which means further training on newer data. The TiMaGPT series of models can be used to study the limitations of Temporal Adaptation of language models.
2️⃣ Word meaning can change not only with its context but also with its time of use and there is a large amount of research that focuses on understanding how embeddings shift through time. TiMaGPT will be very helpful in studying this phenomenon.
3️⃣ One more use case in the context of Time-series forecasting and event prediction is "backtesting". Which is using historical data to evaluate new models for forecasting the future. Models like TiMaGPT (each living in its own time without any knowledge of the future/present) will be great for such a use case.
🤗 All models and datasets are on the hub: https://huggingface.co/Ti-Ma | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1650745211725-noauth.png",
"fullname": "Mohammed Hamdy",
"name": "mmhamdy",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 38,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/62645f88c39850dc093d6105/p8JAjRuim4ZzcCv7_6BCU.png"
}
] | [] | [
{
"reaction": "❤️",
"users": [
"samusenps",
"monsoon-nlp",
"maywell",
"JackCloudman",
"adamelliotfields"
],
"count": 5
},
{
"reaction": "👍",
"users": [
"aloobun",
"eelang"
],
"count": 2
},
{
"reaction": "🔥",
"users": [
"samusenps"
],
"count": 1
}
] | 2024-04-30T21:12:26.000Z | 2024-04-30T22:23:38.823Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1650745211725-noauth.png",
"fullname": "Mohammed Hamdy",
"name": "mmhamdy",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 38,
"isFollowing": false
}
] | /posts/mmhamdy/146608655577384 | 1,773 | 1 |
475580685573571 | [
{
"type": "text",
"value": "🚀 just reached 3K+ readers on this blog post about RAG using only HF🤗 related tools in just a little over 1 week from publishing. ",
"raw": "🚀 just reached 3K+ readers on this blog post about RAG using only HF🤗 related tools in just a little over 1 week from publishing. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📃the most interesting thing about it is that you can use the FAISS index in the datasets library to retrieve your most similar documents.",
"raw": "📃the most interesting thing about it is that you can use the FAISS index in the datasets library to retrieve your most similar documents.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔗https://huggingface.co/blog/not-lain/rag-chatbot-using-llama3",
"raw": "🔗https://huggingface.co/blog/not-lain/rag-chatbot-using-llama3",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Happy reading everyone ✨",
"raw": "Happy reading everyone ✨",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀 just reached 3K+ readers on this blog post about RAG using only HF🤗 related tools in just a little over 1 week from publishing.
📃the most interesting thing about it is that you can use the FAISS index in the datasets library to retrieve your most similar documents.
🔗https://huggingface.co/blog/not-lain/rag-chatbot-using-llama3
Happy reading everyone ✨ | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg",
"fullname": "Lain",
"name": "not-lain",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 941,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/OuZ5nKZyftHkn1zT9AMoK.png"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"aari1995",
"victor",
"qubvel-hf",
"mooncellar",
"fabio1shot"
],
"count": 5
}
] | 2024-04-30T19:53:05.000Z | 2024-04-30T19:53:05.190Z | [] | /posts/not-lain/475580685573571 | 1,779 | 0 |
100309848935119 | [
{
"type": "text",
"value": "Excited to launch two new SOTA text-to-speech models on the TTS Arena:",
"raw": "Excited to launch two new SOTA text-to-speech models on the TTS Arena:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- OpenVoice V2",
"raw": "- OpenVoice V2",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Play.HT 2.0",
"raw": "- Play.HT 2.0",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "𝗔𝗯𝗼𝘂𝘁 𝘁𝗵𝗲 𝗧𝗧𝗦 𝗔𝗿𝗲𝗻𝗮",
"raw": "𝗔𝗯𝗼𝘂𝘁 𝘁𝗵𝗲 𝗧𝗧𝗦 𝗔𝗿𝗲𝗻𝗮",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The TTS Arena is an open sourced Arena where you can enter a prompt, have two models generate speech, and vote on which one is superior.",
"raw": "The TTS Arena is an open sourced Arena where you can enter a prompt, have two models generate speech, and vote on which one is superior.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We compile the results from the votes into a automatically updated leaderboard to allow developers to select the best model.",
"raw": "We compile the results from the votes into a automatically updated leaderboard to allow developers to select the best model.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We've already included models such as ElevenLabs, XTTS, StyleTTS 2, and MetaVoice. The more votes we collect, the sooner we'll be able to show these new models on the leaderboard and compare them!",
"raw": "We've already included models such as ElevenLabs, XTTS, StyleTTS 2, and MetaVoice. The more votes we collect, the sooner we'll be able to show these new models on the leaderboard and compare them!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "𝗢𝗽𝗲𝗻𝗩𝗼𝗶𝗰𝗲 𝗩𝟮",
"raw": "𝗢𝗽𝗲𝗻𝗩𝗼𝗶𝗰𝗲 𝗩𝟮",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "OpenVoice V2 is an open-sourced speech synthesis model created by MyShell AI that supports instant zero-shot voice cloning. It's the next generation of OpenVoice, and is fully open-sourced under the MIT license.",
"raw": "OpenVoice V2 is an open-sourced speech synthesis model created by MyShell AI that supports instant zero-shot voice cloning. It's the next generation of OpenVoice, and is fully open-sourced under the MIT license.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/myshell-ai/OpenVoice",
"href": "https://github.com/myshell-ai/OpenVoice",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "𝗣𝗹𝗮𝘆.𝗛𝗧 𝟮.𝟬",
"raw": "𝗣𝗹𝗮𝘆.𝗛𝗧 𝟮.𝟬",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Play․HT 2.0 is a high-quality proprietary text-to-speech engine. Accessible through their API, this model supports zero-shot voice cloning.",
"raw": "Play․HT 2.0 is a high-quality proprietary text-to-speech engine. Accessible through their API, this model supports zero-shot voice cloning.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "𝗖𝗼𝗺𝗽𝗮𝗿𝗲 𝘁𝗵𝗲 𝗺𝗼𝗱𝗲𝗹𝘀 𝗼𝗻 𝘁𝗵𝗲 𝗧𝗧𝗦 𝗔𝗿𝗲𝗻𝗮:",
"raw": "𝗖𝗼𝗺𝗽𝗮𝗿𝗲 𝘁𝗵𝗲 𝗺𝗼𝗱𝗲𝗹𝘀 𝗼𝗻 𝘁𝗵𝗲 𝗧𝗧𝗦 𝗔𝗿𝗲𝗻𝗮:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/TTS-AGI/TTS-Arena",
"href": null,
"resource": {
"type": "space",
"id": "TTS-AGI/TTS-Arena",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/TTS-AGI/TTS-Arena",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Excited to launch two new SOTA text-to-speech models on the TTS Arena:
- OpenVoice V2
- Play.HT 2.0
𝗔𝗯𝗼𝘂𝘁 𝘁𝗵𝗲 𝗧𝗧𝗦 𝗔𝗿𝗲𝗻𝗮
The TTS Arena is an open sourced Arena where you can enter a prompt, have two models generate speech, and vote on which one is superior.
We compile the results from the votes into a automatically updated leaderboard to allow developers to select the best model.
We've already included models such as ElevenLabs, XTTS, StyleTTS 2, and MetaVoice. The more votes we collect, the sooner we'll be able to show these new models on the leaderboard and compare them!
𝗢𝗽𝗲𝗻𝗩𝗼𝗶𝗰𝗲 𝗩𝟮
OpenVoice V2 is an open-sourced speech synthesis model created by MyShell AI that supports instant zero-shot voice cloning. It's the next generation of OpenVoice, and is fully open-sourced under the MIT license.
https://github.com/myshell-ai/OpenVoice
𝗣𝗹𝗮𝘆.𝗛𝗧 𝟮.𝟬
Play․HT 2.0 is a high-quality proprietary text-to-speech engine. Accessible through their API, this model supports zero-shot voice cloning.
𝗖𝗼𝗺𝗽𝗮𝗿𝗲 𝘁𝗵𝗲 𝗺𝗼𝗱𝗲𝗹𝘀 𝗼𝗻 𝘁𝗵𝗲 𝗧𝗧𝗦 𝗔𝗿𝗲𝗻𝗮:
https://huggingface.co/spaces/TTS-AGI/TTS-Arena | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png",
"fullname": "mrfakename",
"name": "mrfakename",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 969,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"bunnycore",
"diwank"
],
"count": 2
}
] | 2024-04-30T18:56:09.000Z | 2024-04-30T18:56:09.023Z | [] | /posts/mrfakename/100309848935119 | 2,605 | 0 |
884026993565149 | [
{
"type": "text",
"value": "As part of the Data is Better Together MPEP project, we are now at the point where some translation efforts have successfully translated 500 highly ranked prompts into a new target language (amazing work from ",
"raw": "As part of the Data is Better Together MPEP project, we are now at the point where some translation efforts have successfully translated 500 highly ranked prompts into a new target language (amazing work from ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@Rijgersberg",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "Rijgersberg",
"label": null,
"lang": null
},
{
"type": "text",
"value": " et al!)",
"raw": " et al!)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Our next step is to use these translated prompts to evaluate the performance of LLMs for non English languages. ",
"raw": "Our next step is to use these translated prompts to evaluate the performance of LLMs for non English languages. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Does LLM, as a judge, work outside of English?",
"raw": "Does LLM, as a judge, work outside of English?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Ideally, it would be compelling to leverage LLMs to judge models for non-English since this significantly lowers the barrier to evaluating models (although it doesn't remove this barrier altogether). ",
"raw": "Ideally, it would be compelling to leverage LLMs to judge models for non-English since this significantly lowers the barrier to evaluating models (although it doesn't remove this barrier altogether). ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "What we want to know is:",
"raw": "What we want to know is:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- does auto/LLM eval work in general for a particular language",
"raw": "- does auto/LLM eval work in general for a particular language",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- which model(s) works best as a judge",
"raw": "- which model(s) works best as a judge",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- do LLMs' judgments of non-English models match human preferences? ",
"raw": "- do LLMs' judgments of non-English models match human preferences? ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We're starting to think about how to approach this. If you have any ideas of possible approaches feel free to comment or join the discussion here: ",
"raw": "We're starting to think about how to approach this. If you have any ideas of possible approaches feel free to comment or join the discussion here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/data-is-better-together/issues/61",
"href": "https://github.com/huggingface/data-is-better-together/issues/61",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Other ideas...",
"raw": "Other ideas...",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Could an approach like ",
"raw": "Could an approach like ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.18796",
"href": null,
"resource": {
"type": "paper",
"id": "2404.18796",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.18796",
"code": null,
"user": null,
"label": "Replacing Judges with Juries: Evaluating LLM Generations with a Panel of\n Diverse Models (2404.18796)",
"lang": null
},
{
"type": "text",
"value": " with the SOA models for a particular language work? i.e., choose 4 of the best open LLMs for Arabic and use those at the pool of raters rather than relying on one powerful judge LLM? ",
"raw": " with the SOA models for a particular language work? i.e., choose 4 of the best open LLMs for Arabic and use those at the pool of raters rather than relying on one powerful judge LLM? ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | As part of the Data is Better Together MPEP project, we are now at the point where some translation efforts have successfully translated 500 highly ranked prompts into a new target language (amazing work from @Rijgersberg et al!)
Our next step is to use these translated prompts to evaluate the performance of LLMs for non English languages.
Does LLM, as a judge, work outside of English?
Ideally, it would be compelling to leverage LLMs to judge models for non-English since this significantly lowers the barrier to evaluating models (although it doesn't remove this barrier altogether).
What we want to know is:
- does auto/LLM eval work in general for a particular language
- which model(s) works best as a judge
- do LLMs' judgments of non-English models match human preferences?
We're starting to think about how to approach this. If you have any ideas of possible approaches feel free to comment or join the discussion here: https://github.com/huggingface/data-is-better-together/issues/61
Other ideas...
Could an approach like https://huggingface.co/papers/2404.18796 with the SOA models for a particular language work? i.e., choose 4 of the best open LLMs for Arabic and use those at the pool of raters rather than relying on one powerful judge LLM? | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg",
"fullname": "Daniel van Strien",
"name": "davanstrien",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 410,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6319b164bc8f3b313f7a1db0/Hh0kuwsAnD2AOKdL6PpRs.png",
"fullname": "Edwin Rijgersberg",
"name": "Rijgersberg",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 45
}
] | [
{
"reaction": "🤗",
"users": [
"NePe",
"kristaller486",
"mmhamdy",
"Rijgersberg",
"alvarobartt"
],
"count": 5
},
{
"reaction": "🔥",
"users": [
"NePe",
"mmhamdy"
],
"count": 2
}
] | 2024-04-30T15:56:01.000Z | 2024-04-30T15:56:01.259Z | [] | /posts/davanstrien/884026993565149 | 1,670 | 0 |
208120266952026 | [
{
"type": "text",
"value": "It’s exciting to see Apple’s commitment to opensource AI research lately. From a new awesome machine learning framework (mlx) to a family of purely open models (openELM) and incredibly visionary papers (LLMs in a flash, MM1) not mention the vibrant OSS community behind mlx - All alpha signs of something huge dropping in this year’s #AppleEvent & #WWDC",
"raw": "It’s exciting to see Apple’s commitment to opensource AI research lately. From a new awesome machine learning framework (mlx) to a family of purely open models (openELM) and incredibly visionary papers (LLMs in a flash, MM1) not mention the vibrant OSS community behind mlx - All alpha signs of something huge dropping in this year’s #AppleEvent & #WWDC",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | It’s exciting to see Apple’s commitment to opensource AI research lately. From a new awesome machine learning framework (mlx) to a family of purely open models (openELM) and incredibly visionary papers (LLMs in a flash, MM1) not mention the vibrant OSS community behind mlx - All alpha signs of something huge dropping in this year’s #AppleEvent & #WWDC | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/bklPxs9FQD-EZnPs5u1mX.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/08Y9CWM9uZhFZsS_xkCsb.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/tV2DrQ0t7uBdHH_CpoYxB.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/dJzoPbNix8uGRYWUsPuoS.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"victor",
"onandon",
"adorkin",
"julien-c",
"alielfilali01",
"luancloud"
],
"count": 6
}
] | 2024-04-30T14:12:05.000Z | 2024-04-30T14:12:05.869Z | [] | /posts/Jaward/208120266952026 | 1,748 | 0 |
441311823846269 | [
{
"type": "text",
"value": "HyperSD released the 8-steps CFG-preserved LoRA just now, may be the first acceleration plugin that preserves the original CFG for both SDXL and SD15, you can adjust your negative prompts now!!!",
"raw": "HyperSD released the 8-steps CFG-preserved LoRA just now, may be the first acceleration plugin that preserves the original CFG for both SDXL and SD15, you can adjust your negative prompts now!!!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Hyper-SDXL-8steps-CFG-LoRA: ",
"raw": "Hyper-SDXL-8steps-CFG-LoRA: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SDXL-8steps-CFG-lora.safetensors",
"href": null,
"resource": {
"type": "model",
"id": "ByteDance/Hyper-SD",
"discussionNum": null
},
"url": "https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SDXL-8steps-CFG-lora.safetensors",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Hyper-SD15-8steps-CFG-LoRA: ",
"raw": "Hyper-SD15-8steps-CFG-LoRA: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SD15-8steps-CFG-lora.safetensors",
"href": null,
"resource": {
"type": "model",
"id": "ByteDance/Hyper-SD",
"discussionNum": null
},
"url": "https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SD15-8steps-CFG-lora.safetensors",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | HyperSD released the 8-steps CFG-preserved LoRA just now, may be the first acceleration plugin that preserves the original CFG for both SDXL and SD15, you can adjust your negative prompts now!!!
Hyper-SDXL-8steps-CFG-LoRA: https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SDXL-8steps-CFG-lora.safetensors
Hyper-SD15-8steps-CFG-LoRA: https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SD15-8steps-CFG-lora.safetensors | {
"avatarUrl": "/avatars/5fe356d58c4c822a60370dbee8d78a69.svg",
"fullname": "renyuxi",
"name": "renyuxi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 22,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"victor",
"radames",
"OzzyGT",
"multimodalart"
],
"count": 5
}
] | 2024-04-30T13:49:43.000Z | 2024-04-30T17:47:14.850Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg",
"fullname": "Radamés Ajna",
"name": "radames",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2401,
"isFollowing": false
}
] | /posts/renyuxi/441311823846269 | 1,777 | 1 |
949530140359936 | [
{
"type": "text",
"value": "OpenELM in Core ML",
"raw": "OpenELM in Core ML",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Apple recently released a set of efficient LLMs in sizes varying between 270M and 3B parameters. Their quality, according to benchmarks, is similar to OLMo models of comparable size, but they required half the pre-training tokens because they use layer-wise scaling, where the number of attention heads increases in deeper layers.",
"raw": "Apple recently released a set of efficient LLMs in sizes varying between 270M and 3B parameters. Their quality, according to benchmarks, is similar to OLMo models of comparable size, but they required half the pre-training tokens because they use layer-wise scaling, where the number of attention heads increases in deeper layers.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I converted these models to Core ML, for use on Apple Silicon, using this script: ",
"raw": "I converted these models to Core ML, for use on Apple Silicon, using this script: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://gist.github.com/pcuenca/23cd08443460bc90854e2a6f0f575084",
"href": "https://gist.github.com/pcuenca/23cd08443460bc90854e2a6f0f575084",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ". The converted models were uploaded to this community in the Hub for anyone that wants to integrate inside their apps: ",
"raw": ". The converted models were uploaded to this community in the Hub for anyone that wants to integrate inside their apps: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/collections/corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194",
"href": null,
"resource": {
"type": "collection",
"id": "corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194",
"discussionNum": null
},
"url": "https://huggingface.co/collections/corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The conversion was done with the following parameters:",
"raw": "The conversion was done with the following parameters:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Precision: float32.",
"raw": "- Precision: float32.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Sequence length: fixed to 128.",
"raw": "- Sequence length: fixed to 128.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "With swift-transformers (",
"raw": "With swift-transformers (",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/swift-transformers",
"href": "https://github.com/huggingface/swift-transformers",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "), I'm getting about 56 tok/s with the 270M on my M1 Max, and 6.5 with the largest 3B model. These speeds could be improved by converting to ",
"raw": "), I'm getting about 56 tok/s with the 270M on my M1 Max, and 6.5 with the largest 3B model. These speeds could be improved by converting to ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`float16`",
"href": null,
"resource": null,
"url": null,
"code": "float16",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ". However, there's some precision loss somewhere and generation doesn't work in ",
"raw": ". However, there's some precision loss somewhere and generation doesn't work in ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`float16`",
"href": null,
"resource": null,
"url": null,
"code": "float16",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " mode yet. I'm looking into this and will keep you posted! Or take a look at this issue if you'd like to help: ",
"raw": " mode yet. I'm looking into this and will keep you posted! Or take a look at this issue if you'd like to help: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/huggingface/swift-transformers/issues/95",
"href": "https://github.com/huggingface/swift-transformers/issues/95",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I'm also looking at optimizing inference using an experimental kv cache in swift-transformers. It's a bit tricky because the layers have varying number of attention heads, but I'm curious to see how much this feature can accelerate performance in this model family :)",
"raw": "I'm also looking at optimizing inference using an experimental kv cache in swift-transformers. It's a bit tricky because the layers have varying number of attention heads, but I'm curious to see how much this feature can accelerate performance in this model family :)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Regarding the instruct fine-tuned models, I don't know the chat template that was used. The models use the Llama 2 tokenizer, but the Llama 2 chat template, or the default Alignment Handbook one that was used to train, are not recognized. Any ideas on this welcome!",
"raw": "Regarding the instruct fine-tuned models, I don't know the chat template that was used. The models use the Llama 2 tokenizer, but the Llama 2 chat template, or the default Alignment Handbook one that was used to train, are not recognized. Any ideas on this welcome!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | OpenELM in Core ML
Apple recently released a set of efficient LLMs in sizes varying between 270M and 3B parameters. Their quality, according to benchmarks, is similar to OLMo models of comparable size, but they required half the pre-training tokens because they use layer-wise scaling, where the number of attention heads increases in deeper layers.
I converted these models to Core ML, for use on Apple Silicon, using this script: https://gist.github.com/pcuenca/23cd08443460bc90854e2a6f0f575084. The converted models were uploaded to this community in the Hub for anyone that wants to integrate inside their apps: https://huggingface.co/collections/corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194
The conversion was done with the following parameters:
- Precision: float32.
- Sequence length: fixed to 128.
With swift-transformers (https://github.com/huggingface/swift-transformers), I'm getting about 56 tok/s with the 270M on my M1 Max, and 6.5 with the largest 3B model. These speeds could be improved by converting to `float16`. However, there's some precision loss somewhere and generation doesn't work in `float16` mode yet. I'm looking into this and will keep you posted! Or take a look at this issue if you'd like to help: https://github.com/huggingface/swift-transformers/issues/95
I'm also looking at optimizing inference using an experimental kv cache in swift-transformers. It's a bit tricky because the layers have varying number of attention heads, but I'm curious to see how much this feature can accelerate performance in this model family :)
Regarding the instruct fine-tuned models, I don't know the chat template that was used. The models use the Llama 2 tokenizer, but the Llama 2 chat template, or the default Alignment Handbook one that was used to train, are not recognized. Any ideas on this welcome! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1617264212503-603d25b75f9d390ab190b777.jpeg",
"fullname": "Pedro Cuenca",
"name": "pcuenq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 444,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"ZachNagengast",
"brightbit",
"reach-vb",
"victor",
"radames",
"JimRWallace",
"NickyNicky",
"1duo",
"HelloCephalopod",
"not-lain",
"julien-c",
"MathisDevFP",
"maxrubin629",
"michellenicole",
"Rainierraoul",
"anthonymikinka",
"toddmath",
"irotem98",
"velyan"
],
"count": 19
},
{
"reaction": "❤️",
"users": [
"reach-vb",
"JimRWallace",
"SuperL3D",
"HelloCephalopod",
"not-lain",
"azhka",
"MathisDevFP",
"toddmath",
"velyan"
],
"count": 9
},
{
"reaction": "🚀",
"users": [
"reach-vb",
"HelloCephalopod",
"not-lain",
"MathisDevFP",
"Norod78",
"0xjorgev"
],
"count": 6
},
{
"reaction": "🤯",
"users": [
"reach-vb",
"HelloCephalopod",
"not-lain",
"MathisDevFP"
],
"count": 4
}
] | 2024-04-30T13:26:53.000Z | 2024-05-28T06:35:33.955Z | [
{
"avatarUrl": "/avatars/2523b978663d915705cb9ac4cb878d6f.svg",
"fullname": "Stephen",
"name": "smpanaro",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 6,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1617264212503-603d25b75f9d390ab190b777.jpeg",
"fullname": "Pedro Cuenca",
"name": "pcuenq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 444,
"isFollowing": false
},
{
"avatarUrl": "/avatars/a517b6e94c8442cfa212ec0df42c9b1a.svg",
"fullname": "Cebro",
"name": "crbo",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/pcuenq/949530140359936 | 4,328 | 4 |
406668118700661 | [
{
"type": "text",
"value": "Ai forest",
"raw": "Ai forest",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Ai forest | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D3YnOdQtEMVhUST9qh9hz.jpeg",
"fullname": "Aaron Henderson",
"name": "phenixrhyder",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 46,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/G6LgRktLcDlWgv5AlZ5cu.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"samusenps",
"Masum889",
"sbrandeis"
],
"count": 3
},
{
"reaction": "🤗",
"users": [
"hjebuoebduede",
"dillfrescott"
],
"count": 2
}
] | 2024-04-30T10:34:51.000Z | 2024-04-30T11:53:06.303Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1674772332792-noauth.jpeg",
"fullname": "bhagaskara",
"name": "bhagaskara",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/phenixrhyder/406668118700661 | 1,661 | 1 |
419031280358046 | [
{
"type": "text",
"value": "How Robust Is Your Model in Complex Code Generation Tasks? 🤔",
"raw": "How Robust Is Your Model in Complex Code Generation Tasks? 🤔",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We've launched the PECC benchmark to challenge chat models in code generation, drawing from the Advent of Code for programming tasks and the Euler Project for math-heavy challenges. This new task tests models with problems presented in both detailed prose and concise \"leet code\" styles, evaluating their ability to understand and solve complex coding issues and math problem in chat-based interactions.",
"raw": "We've launched the PECC benchmark to challenge chat models in code generation, drawing from the Advent of Code for programming tasks and the Euler Project for math-heavy challenges. This new task tests models with problems presented in both detailed prose and concise \"leet code\" styles, evaluating their ability to understand and solve complex coding issues and math problem in chat-based interactions.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It seems that the Claude 3 models outperforme ChatGPT:",
"raw": "It seems that the Claude 3 models outperforme ChatGPT:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Model / Avg. (pass@3)",
"raw": "Model / Avg. (pass@3)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Claude 3 Haiku / 27.67",
"raw": "Claude 3 Haiku / 27.67",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "GPT-3.5-Turbo / 23.75",
"raw": "GPT-3.5-Turbo / 23.75",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Mixtral-8x22B-Instruct-v0.1 / 8.35",
"raw": "Mixtral-8x22B-Instruct-v0.1 / 8.35",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Read our Preprint📃: ",
"raw": "Read our Preprint📃: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.18766",
"href": null,
"resource": {
"type": "paper",
"id": "2404.18766",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.18766",
"code": null,
"user": null,
"label": "PECC: Problem Extraction and Coding Challenges (2404.18766)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Look at the dataset🔎: ",
"raw": "Look at the dataset🔎: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/datasets/PatrickHaller/pecc",
"href": null,
"resource": {
"type": "dataset",
"id": "PatrickHaller/pecc",
"discussionNum": null
},
"url": "https://huggingface.co/datasets/PatrickHaller/pecc",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We also got accepted at LREC-COLING '24 🎉",
"raw": "We also got accepted at LREC-COLING '24 🎉",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | How Robust Is Your Model in Complex Code Generation Tasks? 🤔
We've launched the PECC benchmark to challenge chat models in code generation, drawing from the Advent of Code for programming tasks and the Euler Project for math-heavy challenges. This new task tests models with problems presented in both detailed prose and concise "leet code" styles, evaluating their ability to understand and solve complex coding issues and math problem in chat-based interactions.
It seems that the Claude 3 models outperforme ChatGPT:
Model / Avg. (pass@3)
Claude 3 Haiku / 27.67
GPT-3.5-Turbo / 23.75
Mixtral-8x22B-Instruct-v0.1 / 8.35
Read our Preprint📃: https://huggingface.co/papers/2404.18766
Look at the dataset🔎: https://huggingface.co/datasets/PatrickHaller/pecc
We also got accepted at LREC-COLING '24 🎉 | {
"avatarUrl": "/avatars/f383357c28a6221d62f49a07eecced03.svg",
"fullname": "Patrick Haller",
"name": "PatrickHaller",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 9,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"victor",
"barthfab",
"alanakbik"
],
"count": 3
}
] | 2024-04-30T08:31:04.000Z | 2024-04-30T08:31:04.733Z | [] | /posts/PatrickHaller/419031280358046 | 1,884 | 0 |
484550038911380 | [
{
"type": "text",
"value": "Introducing JARVIS Tony's voice assistant for You. ",
"raw": "Introducing JARVIS Tony's voice assistant for You. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "JARVIS responds to all your questions in audio format.",
"raw": "JARVIS responds to all your questions in audio format.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Must TRY -> ",
"raw": "Must TRY -> ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/KingNish/JARVIS",
"href": null,
"resource": {
"type": "space",
"id": "KingNish/JARVIS",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/KingNish/JARVIS",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Jarvis is currently equipped to accept text input and provide audio output.",
"raw": "Jarvis is currently equipped to accept text input and provide audio output.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "In the future, it may also support audio input.",
"raw": "In the future, it may also support audio input.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "DEMO Video: ",
"raw": "DEMO Video: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Introducing JARVIS Tony's voice assistant for You.
JARVIS responds to all your questions in audio format.
Must TRY -> https://huggingface.co/spaces/KingNish/JARVIS
Jarvis is currently equipped to accept text input and provide audio output.
In the future, it may also support audio input.
DEMO Video:
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6612aedf09f16e7347dfa7e1/Ti6kgCUUizXYvKU7pPAJD.mp4"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"victor",
"sebastianking",
"SvCy",
"DamarJati",
"Ezi",
"Marc-Gloaguen",
"julien-c"
],
"count": 7
},
{
"reaction": "❤️",
"users": [
"KingNish"
],
"count": 1
}
] | 2024-04-30T08:23:33.000Z | 2024-05-01T13:33:45.682Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6456f3ca1ca9debab0554f8b/pQuSK-pS3NnJgXewbODvh.png",
"fullname": "Damar Jati 🍫",
"name": "DamarJati",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 116,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
}
] | /posts/KingNish/484550038911380 | 2,623 | 4 |
775875912158761 | [
{
"type": "text",
"value": "I've built a custom component that integrates Rerun web viewer with Gradio, making it easier to share your demos as Gradio apps.",
"raw": "I've built a custom component that integrates Rerun web viewer with Gradio, making it easier to share your demos as Gradio apps.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Basic snippet",
"raw": "Basic snippet",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "code_fence",
"value": null,
"raw": "```\n# pip install gradio_rerun gradio\nimport gradio as gr\nfrom gradio_rerun import Rerun\n\ngr.Interface(\n inputs=gr.File(file_count=\"multiple\", type=\"filepath\"),\n outputs=Rerun(height=900),\n fn=lambda file_path: file_path,\n).launch()\n```",
"href": null,
"resource": null,
"url": null,
"code": "# pip install gradio_rerun gradio\nimport gradio as gr\nfrom gradio_rerun import Rerun\n\ngr.Interface(\n inputs=gr.File(file_count=\"multiple\", type=\"filepath\"),\n outputs=Rerun(height=900),\n fn=lambda file_path: file_path,\n).launch()",
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "More details here ",
"raw": "More details here ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/radames/gradio_rerun",
"href": null,
"resource": {
"type": "space",
"id": "radames/gradio_rerun",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/radames/gradio_rerun",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Source ",
"raw": "Source ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/radames/gradio-rerun-viewer",
"href": "https://github.com/radames/gradio-rerun-viewer",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Follow Rerun here ",
"raw": "Follow Rerun here ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/rerun",
"href": "https://huggingface.co/rerun",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I've built a custom component that integrates Rerun web viewer with Gradio, making it easier to share your demos as Gradio apps.
Basic snippet
```
# pip install gradio_rerun gradio
import gradio as gr
from gradio_rerun import Rerun
gr.Interface(
inputs=gr.File(file_count="multiple", type="filepath"),
outputs=Rerun(height=900),
fn=lambda file_path: file_path,
).launch()
```
More details here https://huggingface.co/spaces/radames/gradio_rerun
Source https://github.com/radames/gradio-rerun-viewer
Follow Rerun here https://huggingface.co/rerun | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg",
"fullname": "Radamés Ajna",
"name": "radames",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2401,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6064e095abd8d3692e3e2ed6/m1-Lnytc01ZbRzxxCcxvN.mp4"
}
] | [] | [
{
"reaction": "❤️",
"users": [
"louisbrulenaudet",
"AlekseiPravdin",
"andreasnaoum",
"Tyfo87"
],
"count": 4
}
] | 2024-04-30T04:23:07.000Z | 2024-04-30T04:23:37.694Z | [] | /posts/radames/775875912158761 | 2,445 | 0 |
649395735581384 | [
{
"type": "text",
"value": "💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧",
"raw": "💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲!",
"raw": "🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important?",
"raw": "The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. When model trainings cost million, you absolutely need to get this right.",
"raw": "Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. When model trainings cost million, you absolutely need to get this right.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The new paper \"Chinchilla Scaling: A replication attempt\" by Epoch AI sets on on the ambitious goal of reproducing this.",
"raw": "The new paper \"Chinchilla Scaling: A replication attempt\" by Epoch AI sets on on the ambitious goal of reproducing this.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data.",
"raw": "But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.",
"raw": "💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.",
"raw": "➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!",
"raw": "✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧
🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲!
The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important?
Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. When model trainings cost million, you absolutely need to get this right.
The new paper "Chinchilla Scaling: A replication attempt" by Epoch AI sets on on the ambitious goal of reproducing this.
But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data.
💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.
➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.
✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg",
"fullname": "Aymeric Roucher",
"name": "m-ric",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 494,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/0qpW4Z30N3rzAuCslgmk9.png"
}
] | [] | [
{
"reaction": "👀",
"users": [
"chansung",
"hllj",
"maywell",
"radames",
"victor",
"cstr"
],
"count": 6
},
{
"reaction": "🤯",
"users": [
"maywell",
"radames",
"louisbrulenaudet",
"KingNish",
"NotSiDDH"
],
"count": 5
}
] | 2024-04-29T17:16:25.000Z | 2024-04-29T17:16:25.389Z | [] | /posts/m-ric/649395735581384 | 2,776 | 0 |
103975211911493 | [
{
"type": "text",
"value": "Adding a long prompt can help you fight LLM hallucinations. However, if you know exactly how you want your LLM output constrained, there are much better strategies! 💪",
"raw": "Adding a long prompt can help you fight LLM hallucinations. However, if you know exactly how you want your LLM output constrained, there are much better strategies! 💪",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Did you know you can force your LLM to ALWAYS generate a valid JSON file? Or to follow a well-defined answer template? You can do that and more with the 🤗 transformers-compatible ",
"raw": "Did you know you can force your LLM to ALWAYS generate a valid JSON file? Or to follow a well-defined answer template? You can do that and more with the 🤗 transformers-compatible ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`outlines`",
"href": null,
"resource": null,
"url": null,
"code": "outlines",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " library.",
"raw": " library.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It doesn't only allow you to master your LLM -- your text generation application will also become faster! 🔥 The more constrained your text generation is, the bigger speedups you'll see!",
"raw": "It doesn't only allow you to master your LLM -- your text generation application will also become faster! 🔥 The more constrained your text generation is, the bigger speedups you'll see!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Follow ",
"raw": "Follow ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@remi",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "remi",
"label": null,
"lang": null
},
{
"type": "text",
"value": " and other ",
"raw": " and other ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`outlines`",
"href": null,
"resource": null,
"url": null,
"code": "outlines",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " folks to stay on top of the constrained generation game 🧠",
"raw": " folks to stay on top of the constrained generation game 🧠",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Adding a long prompt can help you fight LLM hallucinations. However, if you know exactly how you want your LLM output constrained, there are much better strategies! 💪
Did you know you can force your LLM to ALWAYS generate a valid JSON file? Or to follow a well-defined answer template? You can do that and more with the 🤗 transformers-compatible `outlines` library.
It doesn't only allow you to master your LLM -- your text generation application will also become faster! 🔥 The more constrained your text generation is, the bigger speedups you'll see!
Follow @remi and other `outlines` folks to stay on top of the constrained generation game 🧠 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1641203017724-noauth.png",
"fullname": "Joao Gante",
"name": "joaogante",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 96,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/61d2c54d76c37de24cfed058/Oj2CAYMQzelOJBmcEZ4BC.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5de8d7255c51de1bfc829f99/98fxu2lJMyEsh2j2PtsAs.jpeg",
"fullname": "Remi Louf",
"name": "remi",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 27
}
] | [
{
"reaction": "🔥",
"users": [
"victor",
"Andyrasika",
"a9i",
"bunnycore"
],
"count": 4
}
] | 2024-04-29T16:50:52.000Z | 2024-04-29T16:50:52.057Z | [] | /posts/joaogante/103975211911493 | 2,568 | 0 |
885615397037851 | [
{
"type": "text",
"value": "The hype is real: a mysterious gpt2-chatbot model has appeared on the LLM Arena Leaderboard 👀.",
"raw": "The hype is real: a mysterious gpt2-chatbot model has appeared on the LLM Arena Leaderboard 👀.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It seems to be at least on par with the top performing models (closed and open).",
"raw": "It seems to be at least on par with the top performing models (closed and open).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "To try it out: ",
"raw": "To try it out: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://chat.lmsys.org/",
"href": "https://chat.lmsys.org/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " -> then click on the Direct Chat tab and select gpt2-chatbot.",
"raw": " -> then click on the Direct Chat tab and select gpt2-chatbot.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " Take your bet, what do you think it is?",
"raw": " Take your bet, what do you think it is?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | The hype is real: a mysterious gpt2-chatbot model has appeared on the LLM Arena Leaderboard 👀.
It seems to be at least on par with the top performing models (closed and open).
To try it out: https://chat.lmsys.org/ -> then click on the Direct Chat tab and select gpt2-chatbot.
Take your bet, what do you think it is? | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"KingNish",
"FreeHugsForRobots",
"radames",
"mvaloatto",
"AlekseiPravdin",
"alfredplpl",
"Lin-Rexter",
"icpro",
"adumred",
"not-lain"
],
"count": 10
},
{
"reaction": "🚀",
"users": [
"KingNish",
"radames",
"xiaotianhan",
"louisbrulenaudet",
"AlekseiPravdin",
"PatrickHaller",
"not-lain"
],
"count": 7
},
{
"reaction": "🤗",
"users": [
"fffiloni",
"VictorSanh",
"not-lain",
"NHLOCAL"
],
"count": 4
}
] | 2024-04-29T16:28:04.000Z | 2024-05-05T09:31:49.737Z | [
{
"avatarUrl": "/avatars/94523d0c7f3997149f29bc2cff7613a7.svg",
"fullname": "Chester Loppershot",
"name": "CCP6",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64ce8a83e7b70e91a2318994/OGZg9S9brKns2M6xvn8qE.jpeg",
"fullname": "Jens Roland",
"name": "FreeHugsForRobots",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1670594087059-630412d57373aacccd88af95.jpeg",
"fullname": "Yasunori Ozaki",
"name": "alfredplpl",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 61,
"isFollowing": false
},
{
"avatarUrl": "/avatars/0ab90ad0a693283ac41459304d93c039.svg",
"fullname": "苏一然",
"name": "Suyiran66",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/victor/885615397037851 | 4,267 | 4 |
913380882014567 | [
{
"type": "text",
"value": "Should media organizations strike deals with big tech companies? Here are two colliding news stories about licensing:",
"raw": "Should media organizations strike deals with big tech companies? Here are two colliding news stories about licensing:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1. The Financial Times has secured a licensing agreement with OpenAI to license its material both for training and queries on ChatGPT. It is the fifth such deal, following similar agreements with Associated Press, Axel Springer, Le Monde and Prisa Media. \"Financial terms were not disclosed.\"",
"raw": "1. The Financial Times has secured a licensing agreement with OpenAI to license its material both for training and queries on ChatGPT. It is the fifth such deal, following similar agreements with Associated Press, Axel Springer, Le Monde and Prisa Media. \"Financial terms were not disclosed.\"",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "\"Apart from the benefits to the FT, there are broader implications for the industry. It’s right, of course, that AI platforms pay publishers for the use of their material. OpenAI understands the importance of transparency, attribution, and compensation – all essential for us.\"",
"raw": "\"Apart from the benefits to the FT, there are broader implications for the industry. It’s right, of course, that AI platforms pay publishers for the use of their material. OpenAI understands the importance of transparency, attribution, and compensation – all essential for us.\"",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "2. Meanwhile, French media outlet Mediapart is refusing to cash in money from Google, which it is entitled to under so-called \"neighbouring rights\" for the right to display their news content online. ",
"raw": "2. Meanwhile, French media outlet Mediapart is refusing to cash in money from Google, which it is entitled to under so-called \"neighbouring rights\" for the right to display their news content online. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Why? Due to issues with disclosing financial terms: \"The confidentiality clauses imposed by Google today prevent us from publicizing to our readers not only the total amount paid, but also the amount Mediapart is entitled to receive.\"",
"raw": "Why? Due to issues with disclosing financial terms: \"The confidentiality clauses imposed by Google today prevent us from publicizing to our readers not only the total amount paid, but also the amount Mediapart is entitled to receive.\"",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "\"In our view, financial dependence on platforms is incompatible with our public service mission, which is to make the powerful face up to their responsibilities. It also seems extremely dangerous economically.\"",
"raw": "\"In our view, financial dependence on platforms is incompatible with our public service mission, which is to make the powerful face up to their responsibilities. It also seems extremely dangerous economically.\"",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Two positions at opposite sides of the spectrum.",
"raw": "Two positions at opposite sides of the spectrum.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- The Financial Times and OpenAI strike content licensing deal",
"raw": "- The Financial Times and OpenAI strike content licensing deal",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.ft.com/content/33328743-ba3b-470f-a2e3-f41c3a366613",
"href": "https://www.ft.com/content/33328743-ba3b-470f-a2e3-f41c3a366613",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Droits voisins : Mediapart lance la bataille de la transparence contre Google (in French) https",
"raw": "- Droits voisins : Mediapart lance la bataille de la transparence contre Google (in French) https",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Should media organizations strike deals with big tech companies? Here are two colliding news stories about licensing:
1. The Financial Times has secured a licensing agreement with OpenAI to license its material both for training and queries on ChatGPT. It is the fifth such deal, following similar agreements with Associated Press, Axel Springer, Le Monde and Prisa Media. "Financial terms were not disclosed."
"Apart from the benefits to the FT, there are broader implications for the industry. It’s right, of course, that AI platforms pay publishers for the use of their material. OpenAI understands the importance of transparency, attribution, and compensation – all essential for us."
2. Meanwhile, French media outlet Mediapart is refusing to cash in money from Google, which it is entitled to under so-called "neighbouring rights" for the right to display their news content online.
Why? Due to issues with disclosing financial terms: "The confidentiality clauses imposed by Google today prevent us from publicizing to our readers not only the total amount paid, but also the amount Mediapart is entitled to receive."
"In our view, financial dependence on platforms is incompatible with our public service mission, which is to make the powerful face up to their responsibilities. It also seems extremely dangerous economically."
Two positions at opposite sides of the spectrum.
- The Financial Times and OpenAI strike content licensing deal
https://www.ft.com/content/33328743-ba3b-470f-a2e3-f41c3a366613
- Droits voisins : Mediapart lance la bataille de la transparence contre Google (in French) https | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [] | [] | [] | 2024-04-29T15:18:32.000Z | 2024-04-29T15:18:32.441Z | [] | /posts/fdaudens/913380882014567 | 1,669 | 0 |
129761771868822 | [
{
"type": "text",
"value": "Am I the only one who think command-r-+ is a better daily Assistant than ChatGPT-4? (and it's not even close :D)",
"raw": "Am I the only one who think command-r-+ is a better daily Assistant than ChatGPT-4? (and it's not even close :D)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Am I the only one who think command-r-+ is a better daily Assistant than ChatGPT-4? (and it's not even close :D) | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/9rs3PKby-LhiVa2TY0kQE.mp4"
}
] | [] | [
{
"reaction": "➕",
"users": [
"KingNish",
"jgitsolutions",
"fdaudens",
"radames",
"not-lain",
"WaveCut",
"Alagib",
"Rybens",
"DamarJati",
"Sachiii84"
],
"count": 10
}
] | 2024-04-29T13:16:05.000Z | 2024-05-06T13:25:12.376Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1661165878439-noauth.jpeg",
"fullname": "Valeriy Selitskiy",
"name": "WaveCut",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
},
{
"avatarUrl": "/avatars/74d7b8379b73471a775282bd8d24f321.svg",
"fullname": "Social",
"name": "chandansocial7",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/657eb5b256c9c67605a6e8b5/RPblnGJX57oiIcASEz_S8.png",
"fullname": "raincandy_U",
"name": "raincandy-u",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 30,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
}
] | /posts/victor/129761771868822 | 2,271 | 6 |
347850352738963 | [
{
"type": "text",
"value": "#ICLR 2024 is almost there 🔥🔥🔥 counting the days to be again in the beautiful city of Vienna participating in the The Twelfth International Conference on Learning Representations, hope to see many of the Hugging Face comunity there!",
"raw": "#ICLR 2024 is almost there 🔥🔥🔥 counting the days to be again in the beautiful city of Vienna participating in the The Twelfth International Conference on Learning Representations, hope to see many of the Hugging Face comunity there!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I would like to contribute 🎁 by releasing the second Knowledge Vault, with 100 lectures visualized from the last 10 years of ICLR from 2014 to 2023, including knowledge graphs for all the Invited Lectures and some extras, with almost 3000 topics represented. (Of course using several AI tools including Llama3)",
"raw": "I would like to contribute 🎁 by releasing the second Knowledge Vault, with 100 lectures visualized from the last 10 years of ICLR from 2014 to 2023, including knowledge graphs for all the Invited Lectures and some extras, with almost 3000 topics represented. (Of course using several AI tools including Llama3)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can explore it here: ",
"raw": "You can explore it here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌏 ",
"raw": "🌏 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://theendofknowledge.com/Vaults/2/ICLR2014-2023.html",
"href": "https://theendofknowledge.com/Vaults/2/ICLR2014-2023.html",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "And you can learn more about the Vaults here: ",
"raw": "And you can learn more about the Vaults here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📝https://www.linkedin.com/pulse/knowledge-vaults-david-vivancos-lbjef/",
"raw": "📝https://www.linkedin.com/pulse/knowledge-vaults-david-vivancos-lbjef/",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Hope you like the Knowledge Vault!",
"raw": "Hope you like the Knowledge Vault!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | #ICLR 2024 is almost there 🔥🔥🔥 counting the days to be again in the beautiful city of Vienna participating in the The Twelfth International Conference on Learning Representations, hope to see many of the Hugging Face comunity there!
I would like to contribute 🎁 by releasing the second Knowledge Vault, with 100 lectures visualized from the last 10 years of ICLR from 2014 to 2023, including knowledge graphs for all the Invited Lectures and some extras, with almost 3000 topics represented. (Of course using several AI tools including Llama3)
You can explore it here:
🌏 https://theendofknowledge.com/Vaults/2/ICLR2014-2023.html
And you can learn more about the Vaults here:
📝https://www.linkedin.com/pulse/knowledge-vaults-david-vivancos-lbjef/
Hope you like the Knowledge Vault! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1671537650254-noauth.jpeg",
"fullname": "David Vivancos",
"name": "DavidVivancos",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 27,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63a1a41be36f2e4d5b09f187/6h_yC2OwxSm6031393eIh.mp4"
}
] | [] | [
{
"reaction": "👀",
"users": [
"victor",
"notoookay",
"AlekseiPravdin",
"damerajee"
],
"count": 4
}
] | 2024-04-29T12:40:46.000Z | 2024-04-29T12:40:46.344Z | [] | /posts/DavidVivancos/347850352738963 | 1,945 | 0 |
771789672419906 | [
{
"type": "text",
"value": "If you are into video generation you should check this great Space by ",
"raw": "If you are into video generation you should check this great Space by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@KingNish",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "KingNish",
"label": null,
"lang": null
},
{
"type": "text",
"value": " - generate videos in ~10 seconds!",
"raw": " - generate videos in ~10 seconds!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/KingNish/Instant-Video",
"href": null,
"resource": {
"type": "space",
"id": "KingNish/Instant-Video",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/KingNish/Instant-Video",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | If you are into video generation you should check this great Space by @KingNish - generate videos in ~10 seconds!
https://huggingface.co/spaces/KingNish/Instant-Video | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/FgkyRxhXqD_ZtlcTWBm0q.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/NwGvj1zEIBLTqrM2O2wrF.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/2i8-zGuk7yBk3sX9GRrcc.mp4"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/3ciFHz3BSluJeIoy52ZiL.mp4"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079
}
] | [
{
"reaction": "🔥",
"users": [
"KingNish",
"SvCy"
],
"count": 2
}
] | 2024-04-29T11:58:38.000Z | 2024-04-29T13:15:54.981Z | [] | /posts/victor/771789672419906 | 1,559 | 1 |
790992987985812 | [
{
"type": "link",
"value": null,
"raw": "https://arxiv.org/pdf/2402.06852",
"href": "https://arxiv.org/pdf/2402.06852",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Technical report for ChemLLM 1.5 released.🤗",
"raw": "Technical report for ChemLLM 1.5 released.🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | https://arxiv.org/pdf/2402.06852
Technical report for ChemLLM 1.5 released.🤗 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/bQFX1iFbXEBXcQvUNL811.png",
"fullname": "Di Zhang",
"name": "qq8933",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 108,
"isFollowing": false
} | [] | [] | [
{
"reaction": "😎",
"users": [
"qq8933",
"victor"
],
"count": 2
}
] | 2024-04-29T09:39:36.000Z | 2024-05-03T08:50:37.631Z | [] | /posts/qq8933/790992987985812 | 1,713 | 5 |
455668761137050 | [
{
"type": "text",
"value": "🦢 The SWIM-IR dataset contains 29 million text-retrieval training pairs across 27 diverse languages. It is one of the largest synthetic multilingual datasets generated using PaLM 2 on Wikipedia! 🔥🔥",
"raw": "🦢 The SWIM-IR dataset contains 29 million text-retrieval training pairs across 27 diverse languages. It is one of the largest synthetic multilingual datasets generated using PaLM 2 on Wikipedia! 🔥🔥",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "SWIM-IR dataset contains three subsets :",
"raw": "SWIM-IR dataset contains three subsets :",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Cross-lingual:",
"raw": "- Cross-lingual:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`nthakur/swim-ir-cross-lingual`",
"href": null,
"resource": null,
"url": null,
"code": "nthakur/swim-ir-cross-lingual",
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Monolingual: ",
"raw": "- Monolingual: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`nthakur/swim-ir-monolingual`",
"href": null,
"resource": null,
"url": null,
"code": "nthakur/swim-ir-monolingual",
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Indic Cross-lingual: ",
"raw": "- Indic Cross-lingual: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`nthakur/indic-swim-ir-cross-lingual`",
"href": null,
"resource": null,
"url": null,
"code": "nthakur/indic-swim-ir-cross-lingual",
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Check it out:",
"raw": "Check it out:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/collections/nthakur/swim-ir-dataset-662ddaecfc20896bf14dd9b7",
"href": "https://huggingface.co/collections/nthakur/swim-ir-dataset-662ddaecfc20896bf14dd9b7",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🦢 The SWIM-IR dataset contains 29 million text-retrieval training pairs across 27 diverse languages. It is one of the largest synthetic multilingual datasets generated using PaLM 2 on Wikipedia! 🔥🔥
SWIM-IR dataset contains three subsets :
- Cross-lingual:`nthakur/swim-ir-cross-lingual`
- Monolingual: `nthakur/swim-ir-monolingual`
- Indic Cross-lingual: `nthakur/indic-swim-ir-cross-lingual`
Check it out:
https://huggingface.co/collections/nthakur/swim-ir-dataset-662ddaecfc20896bf14dd9b7 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1612277330660-noauth.jpeg",
"fullname": "Nandan Thakur",
"name": "nthakur",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 10,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"lunarflu",
"tomaarsen",
"louisbrulenaudet"
],
"count": 3
},
{
"reaction": "👀",
"users": [
"Tonic"
],
"count": 1
},
{
"reaction": "🤯",
"users": [
"Tonic"
],
"count": 1
}
] | 2024-04-28T18:00:00.000Z | 2024-04-28T19:27:15.118Z | [] | /posts/nthakur/455668761137050 | 3,255 | 0 |
865325680799737 | [
{
"type": "text",
"value": "🔥 Transfer model's Chat feature, Context length and Knowledge to another under 1 minute without any train.",
"raw": "🔥 Transfer model's Chat feature, Context length and Knowledge to another under 1 minute without any train.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Imagine being able to create chat models, expand context, and transfer domain-specific knowledge to models, all within a matter of minutes. Our innovative approach, based on a combination of diff-based techniques and sigmoid ratio calculations, makes this possible.",
"raw": "Imagine being able to create chat models, expand context, and transfer domain-specific knowledge to models, all within a matter of minutes. Our innovative approach, based on a combination of diff-based techniques and sigmoid ratio calculations, makes this possible.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "By considering the diffs between the desired information model (long context or chat) and the base model, as well as the diffs between the base model and the target model, we can efficiently transfer features and expand context without the need for extensive training or resources.",
"raw": "By considering the diffs between the desired information model (long context or chat) and the base model, as well as the diffs between the base model and the target model, we can efficiently transfer features and expand context without the need for extensive training or resources.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Our method minimizes model degradation and ensures that only the desired information is captured, resulting in high-quality models that can be created with just a single click. Whether you need a chat model, expanded context, or domain-specific knowledge transfer, our approach offers a rapid and effective solution.",
"raw": "Our method minimizes model degradation and ensures that only the desired information is captured, resulting in high-quality models that can be created with just a single click. Whether you need a chat model, expanded context, or domain-specific knowledge transfer, our approach offers a rapid and effective solution.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "In blog post below, we will dive into the details of our method, provide code examples, and showcase the impressive results achieved using our approach. Get ready to revolutionize your model creation process and unlock new possibilities with this powerful technique.",
"raw": "In blog post below, we will dive into the details of our method, provide code examples, and showcase the impressive results achieved using our approach. Get ready to revolutionize your model creation process and unlock new possibilities with this powerful technique.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Blog - ",
"raw": "Blog - ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co/blog/maywell/llm-feature-transfer",
"href": "https://huggingface.co/blog/maywell/llm-feature-transfer",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🔥 Transfer model's Chat feature, Context length and Knowledge to another under 1 minute without any train.
Imagine being able to create chat models, expand context, and transfer domain-specific knowledge to models, all within a matter of minutes. Our innovative approach, based on a combination of diff-based techniques and sigmoid ratio calculations, makes this possible.
By considering the diffs between the desired information model (long context or chat) and the base model, as well as the diffs between the base model and the target model, we can efficiently transfer features and expand context without the need for extensive training or resources.
Our method minimizes model degradation and ensures that only the desired information is captured, resulting in high-quality models that can be created with just a single click. Whether you need a chat model, expanded context, or domain-specific knowledge transfer, our approach offers a rapid and effective solution.
In blog post below, we will dive into the details of our method, provide code examples, and showcase the impressive results achieved using our approach. Get ready to revolutionize your model creation process and unlock new possibilities with this powerful technique.
Blog - https://huggingface.co/blog/maywell/llm-feature-transfer | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6439f43a1514b7ee7fb616a1/aFhmyAoicv3zcWKYZ27Z_.png",
"fullname": "Jeonghwan Park",
"name": "maywell",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 298,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🚀",
"users": [
"maywell",
"IHaBiS",
"lunarflu",
"Unggi",
"beomi",
"victor",
"julien-c",
"FengJ",
"DeathGodlike",
"hivaze",
"kaki-paper",
"hiauiarau"
],
"count": 12
},
{
"reaction": "👍",
"users": [
"IHaBiS",
"lunarflu",
"beomi",
"celarlin",
"scoop777",
"sappho192",
"kaki-paper"
],
"count": 7
}
] | 2024-04-28T12:48:23.000Z | 2024-05-22T04:11:12.021Z | [
{
"avatarUrl": "/avatars/7fa9de162694d34a214ccd8ecb02fa0a.svg",
"fullname": "Sergey Zubrilin",
"name": "hiauiarau",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6439f43a1514b7ee7fb616a1/aFhmyAoicv3zcWKYZ27Z_.png",
"fullname": "Jeonghwan Park",
"name": "maywell",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 298,
"isFollowing": false
}
] | /posts/maywell/865325680799737 | 8,500 | 2 |
553900179224995 | [
{
"type": "text",
"value": "Soon new releases on NeverSleep 👀",
"raw": "Soon new releases on NeverSleep 👀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "8B/70B Llama3 RP fine-tune in the work!",
"raw": "8B/70B Llama3 RP fine-tune in the work!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Soon new releases on NeverSleep 👀
8B/70B Llama3 RP fine-tune in the work! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63ab1241ad514ca8d1430003/d-43TcOxG-zqAbzrH2m7H.png",
"fullname": "Undi",
"name": "Undi95",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3311,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🚀",
"users": [
"IkariDev",
"Ainonake",
"saishf",
"xxx777xxxASD",
"bluuwhale",
"Andrewwwwww",
"joujiboi",
"lunarflu",
"Nacholmo",
"antiven0m",
"EveryPizza",
"Blobbinski",
"ZootZootTesla",
"beberik",
"victor",
"WesPro",
"kleberbaum",
"den0620",
"TeoR4eg",
"Tillx85",
"HailJebus",
"ABX-AI"
],
"count": 22
},
{
"reaction": "🔥",
"users": [
"ggnick",
"Ainonake",
"Andrewwwwww",
"Th3Nomad",
"lunarflu",
"s0lu",
"Meggido",
"ZootZootTesla",
"bharathchalla",
"MrHillsss",
"den0620",
"Kenshiro-28",
"ABX-AI"
],
"count": 13
},
{
"reaction": "❤️",
"users": [
"saishf",
"Andrewwwwww",
"lunarflu",
"UniversalLove333",
"yashiz",
"scoop777",
"ZootZootTesla",
"den0620",
"Midgardsormr",
"Shibi181",
"ABX-AI"
],
"count": 11
},
{
"reaction": "👀",
"users": [
"Lewdiculous"
],
"count": 1
}
] | 2024-04-28T10:44:59.000Z | 2024-04-30T16:56:03.304Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1667690511692-630dfb008df86f1e5becadc3.png",
"fullname": "IkariDev",
"name": "IkariDev",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 262,
"isFollowing": false
},
{
"avatarUrl": "/avatars/80eb489f00cf499ab4d87ff349102222.svg",
"fullname": "No Name",
"name": "Ainonake",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 4,
"isFollowing": false
}
] | /posts/Undi95/553900179224995 | 9,611 | 2 |
538000120580275 | [
{
"type": "text",
"value": "I created a Twitter account a while back. I finally decided to make it public SebastianG74019. For those of you following ",
"raw": "I created a Twitter account a while back. I finally decided to make it public SebastianG74019. For those of you following ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@Locutusque",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "Locutusque",
"label": null,
"lang": null
},
{
"type": "text",
"value": " on Twitter, that is not me! 😂",
"raw": " on Twitter, that is not me! 😂",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I created a Twitter account a while back. I finally decided to make it public SebastianG74019. For those of you following @Locutusque on Twitter, that is not me! 😂
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg",
"fullname": "Sebastian Gabarain",
"name": "Locutusque",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 180,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg",
"fullname": "Sebastian Gabarain",
"name": "Locutusque",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 180
}
] | [
{
"reaction": "🤯",
"users": [
"lunarflu",
"victor"
],
"count": 2
}
] | 2024-04-28T02:28:33.000Z | 2024-04-29T07:35:46.982Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg",
"fullname": "Joseph [open/acc] Pollack",
"name": "Tonic",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 313,
"isFollowing": false
}
] | /posts/Locutusque/538000120580275 | 4,397 | 2 |
185585910237659 | [
{
"type": "text",
"value": "VTuber Logo Generator❤️🪄⭐️ by ",
"raw": "VTuber Logo Generator❤️🪄⭐️ by ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@gojiteji",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "gojiteji",
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/gojiteji/VTuberLogoGenerator",
"href": null,
"resource": {
"type": "space",
"id": "gojiteji/VTuberLogoGenerator",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/gojiteji/VTuberLogoGenerator",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "How this works:",
"raw": "How this works:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- mistralai/Mixtral-8x7B-Instruct-v0.1 for Japanese transliteration.",
"raw": "- mistralai/Mixtral-8x7B-Instruct-v0.1 for Japanese transliteration.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Stable Diffusion 3 for logo generation.",
"raw": "- Stable Diffusion 3 for logo generation.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- simple k-means for color selection.",
"raw": "- simple k-means for color selection.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | VTuber Logo Generator❤️🪄⭐️ by @gojiteji
https://huggingface.co/spaces/gojiteji/VTuberLogoGenerator
How this works:
- mistralai/Mixtral-8x7B-Instruct-v0.1 for Japanese transliteration.
- Stable Diffusion 3 for logo generation.
- simple k-means for color selection. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/60799266921db717010c7c86/hBckHFJNYXFOBpccr_RGj.jpeg",
"fullname": "Koki Tanaka",
"name": "gojiteji",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 13,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60799266921db717010c7c86/3ZU2Mg9hHg-Yiv_sS0_0M.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60799266921db717010c7c86/AqFXLHONwMczFEmxTqhVK.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60799266921db717010c7c86/vL3VvjdIUop5mhuwu48zI.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60799266921db717010c7c86/dMQJ1-LvFwhIi875eZC8r.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60799266921db717010c7c86/Jfnfr1cDW1LbmizVx0nsT.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/60799266921db717010c7c86/hBckHFJNYXFOBpccr_RGj.jpeg",
"fullname": "Koki Tanaka",
"name": "gojiteji",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 13
}
] | [
{
"reaction": "🔥",
"users": [
"KvrParaskevi",
"dillfrescott",
"lunarflu",
"grx96",
"victor",
"julien-c",
"willowill5",
"pabloce",
"not-lain"
],
"count": 9
},
{
"reaction": "🧠",
"users": [
"coderjuzi",
"dillfrescott",
"lunarflu",
"not-lain"
],
"count": 4
}
] | 2024-04-27T23:30:32.000Z | 2024-10-22T17:35:10.805Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg",
"fullname": "Julien Chaumond",
"name": "julien-c",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 1580,
"isFollowing": false
},
{
"avatarUrl": "/avatars/eeff2542d5bc165a78ba25d858b96f06.svg",
"fullname": "Kevin Webber",
"name": "Buck3tHead",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "/avatars/172868adc85bbc12dd03b6443decbaa3.svg",
"fullname": "Nicoly Boaventura",
"name": "nickymilky",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/gojiteji/185585910237659 | 4,176 | 5 |
861076380823183 | [
{
"type": "text",
"value": "New update to mlx-rag-gguf: ",
"raw": "New update to mlx-rag-gguf: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- mlx supported phi-3-mini-4k gguf weight.",
"raw": "- mlx supported phi-3-mini-4k gguf weight.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- support for other gguf weights (llama arch) 4 & 8 bits quantized.",
"raw": "- support for other gguf weights (llama arch) 4 & 8 bits quantized.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "repo: ",
"raw": "repo: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/Jaykef/mlx-rag-gguf",
"href": "https://github.com/Jaykef/mlx-rag-gguf",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "model ",
"raw": "model ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/Jaward/phi-3-mini-4k-instruct.Q4_0.gguf",
"href": null,
"resource": {
"type": "model",
"id": "Jaward/phi-3-mini-4k-instruct.Q4_0.gguf",
"discussionNum": null
},
"url": "https://huggingface.co/Jaward/phi-3-mini-4k-instruct.Q4_0.gguf",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | New update to mlx-rag-gguf:
- mlx supported phi-3-mini-4k gguf weight.
- support for other gguf weights (llama arch) 4 & 8 bits quantized.
repo: https://github.com/Jaykef/mlx-rag-gguf
model https://huggingface.co/Jaward/phi-3-mini-4k-instruct.Q4_0.gguf | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/IpZbGX6h27NkzzfnSYPM5.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"lunarflu",
"victor",
"dillfrescott"
],
"count": 3
}
] | 2024-04-27T16:44:11.000Z | 2024-04-27T23:55:27.908Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
}
] | /posts/Jaward/861076380823183 | 2,406 | 1 |
441438298321602 | [
{
"type": "text",
"value": "New SDXL model:",
"raw": "New SDXL model:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | New SDXL model: | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1678181702571-619f7ba90df8731e0d8b6c54.jpeg",
"fullname": "Kadir Nar",
"name": "kadirnar",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 198,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/ZbBU77FQG8OtaktkK0LJm.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/Y7YRx8CLYuxkDtRsaBMER.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/eVTOe0KJXnUtf0-ZPe7nd.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/iaePF_7fpW3PghvRoLDhZ.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"zaursamedov1",
"dgallitelli",
"ameerazam08",
"gojiteji",
"svjack",
"lunarflu",
"victor",
"Yasmanyel3men2",
"not-lain",
"danielus"
],
"count": 10
}
] | 2024-04-27T14:04:47.000Z | 2024-04-29T09:52:15.077Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg",
"fullname": "Adam Molnar",
"name": "lunarflu",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 333,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/X7QKoiXbUtEZSG9jyvfk3.jpeg",
"fullname": "Victor Mustar",
"name": "victor",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 2607,
"isFollowing": false
}
] | /posts/kadirnar/441438298321602 | 2,705 | 2 |
826184983427067 | [
{
"type": "text",
"value": "Layer Skip",
"raw": "Layer Skip",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Enabling Early Exit Inference and Self-Speculative Decoding",
"raw": "Enabling Early Exit Inference and Self-Speculative Decoding",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/papers/2404.16710",
"href": null,
"resource": {
"type": "paper",
"id": "2404.16710",
"discussionNum": null
},
"url": "https://huggingface.co/papers/2404.16710",
"code": null,
"user": null,
"label": "LayerSkip: Enabling Early Exit Inference and Self-Speculative Decoding (2404.16710)",
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We present LayerSkip, an end-to-end solution to speed-up inference of large language models (LLMs). First, during training we apply layer dropout, with low dropout rates for earlier layers and higher dropout rates for later layers, and an early exit loss where all transformer layers share the same exit. Second, during inference, we show that this training recipe increases the accuracy of early exit at earlier layers, without adding any auxiliary layers or modules to the model. Third, we present a novel self-speculative decoding solution where we exit at early layers and verify and correct with remaining layers of the model. Our proposed self-speculative decoding approach has less memory footprint than other speculative decoding approaches and benefits from shared compute and activations of the draft and verification stages. We run experiments on different Llama model sizes on different types of training: pretraining from scratch, continual pretraining, finetuning on specific data domain, and finetuning on specific task. We implement our inference solution and show speedups of up to 2.16x on summarization for CNN/DM documents, 1.82x on coding, and 2.0x on TOPv2 semantic parsing task.",
"raw": "We present LayerSkip, an end-to-end solution to speed-up inference of large language models (LLMs). First, during training we apply layer dropout, with low dropout rates for earlier layers and higher dropout rates for later layers, and an early exit loss where all transformer layers share the same exit. Second, during inference, we show that this training recipe increases the accuracy of early exit at earlier layers, without adding any auxiliary layers or modules to the model. Third, we present a novel self-speculative decoding solution where we exit at early layers and verify and correct with remaining layers of the model. Our proposed self-speculative decoding approach has less memory footprint than other speculative decoding approaches and benefits from shared compute and activations of the draft and verification stages. We run experiments on different Llama model sizes on different types of training: pretraining from scratch, continual pretraining, finetuning on specific data domain, and finetuning on specific task. We implement our inference solution and show speedups of up to 2.16x on summarization for CNN/DM documents, 1.82x on coding, and 2.0x on TOPv2 semantic parsing task.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Layer Skip
Enabling Early Exit Inference and Self-Speculative Decoding
https://huggingface.co/papers/2404.16710
We present LayerSkip, an end-to-end solution to speed-up inference of large language models (LLMs). First, during training we apply layer dropout, with low dropout rates for earlier layers and higher dropout rates for later layers, and an early exit loss where all transformer layers share the same exit. Second, during inference, we show that this training recipe increases the accuracy of early exit at earlier layers, without adding any auxiliary layers or modules to the model. Third, we present a novel self-speculative decoding solution where we exit at early layers and verify and correct with remaining layers of the model. Our proposed self-speculative decoding approach has less memory footprint than other speculative decoding approaches and benefits from shared compute and activations of the draft and verification stages. We run experiments on different Llama model sizes on different types of training: pretraining from scratch, continual pretraining, finetuning on specific data domain, and finetuning on specific task. We implement our inference solution and show speedups of up to 2.16x on summarization for CNN/DM documents, 1.82x on coding, and 2.0x on TOPv2 semantic parsing task.
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg",
"fullname": "AK",
"name": "akhaliq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 5205,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/YUZh-6lIBhcuEGHQYDi9C.png"
}
] | [] | [
{
"reaction": "🤗",
"users": [
"Squadgroup1234",
"Chunte",
"gojiteji",
"samadpls",
"yizhilll",
"travisking",
"QuocKhanh",
"vincentmichael089"
],
"count": 8
},
{
"reaction": "🧠",
"users": [
"samadpls",
"QuocKhanh"
],
"count": 2
},
{
"reaction": "➕",
"users": [
"gojiteji"
],
"count": 1
}
] | 2024-04-27T05:30:57.000Z | 2024-04-27T05:31:17.136Z | [] | /posts/akhaliq/826184983427067 | 4,601 | 0 |
563826222170355 | [
{
"type": "text",
"value": "Wow i can post on HF now! ",
"raw": "Wow i can post on HF now! ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Love HF so much 🤗❤️",
"raw": "Love HF so much 🤗❤️",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Wow i can post on HF now!
Love HF so much 🤗❤️ | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6412be271e42164b9f13f177/SvtEKEd6aYtSJLHZ33Rsv.png",
"fullname": "Sunyoung Hwang",
"name": "sosoai",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 16,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🤗",
"users": [
"Chunte",
"gojiteji",
"Moibe",
"julien-c",
"not-lain",
"Tonic",
"SvCy",
"DavidGF"
],
"count": 8
}
] | 2024-04-27T05:17:31.000Z | 2024-04-30T13:07:26.733Z | [
{
"avatarUrl": "/avatars/f73870271500143645ca25541a5448a1.svg",
"fullname": "Castro Silva Neto",
"name": "Newtoneto",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg",
"fullname": "Julien Chaumond",
"name": "julien-c",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 1580,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64b975b696676e40d0ea08aa/fXZFY9a6JxvaQt4iUCFzl.jpeg",
"fullname": "Sourav Chakraborty",
"name": "SvCy",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 7,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg",
"fullname": "Nishith Jain",
"name": "KingNish",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1079,
"isFollowing": false
}
] | /posts/sosoai/563826222170355 | 2,959 | 6 |
946832022299198 | [
{
"type": "text",
"value": "Explore the Latest Top Papers with Papers Leaderboard!",
"raw": "Explore the Latest Top Papers with Papers Leaderboard!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We are excited to introduce a new way to explore the most impactful research papers: Papers Leaderboard! This feature allows you to easily find the most talked-about papers across a variety of fields.",
"raw": "We are excited to introduce a new way to explore the most impactful research papers: Papers Leaderboard! This feature allows you to easily find the most talked-about papers across a variety of fields.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Hf-demo : ",
"raw": "Hf-demo : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co/spaces/ameerazam08/Paper-LeaderBoard",
"href": null,
"resource": {
"type": "space",
"id": "ameerazam08/Paper-LeaderBoard",
"discussionNum": null
},
"url": "https://huggingface.co/spaces/ameerazam08/Paper-LeaderBoard",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Happy weekends!",
"raw": "Happy weekends!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Explore the Latest Top Papers with Papers Leaderboard!
We are excited to introduce a new way to explore the most impactful research papers: Papers Leaderboard! This feature allows you to easily find the most talked-about papers across a variety of fields.
Hf-demo : https://huggingface.co/spaces/ameerazam08/Paper-LeaderBoard
Happy weekends! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6266513d539521e602b5dc3a/qg0fmVTGNKEFL7feyvQNh.png",
"fullname": "Ameer Azam",
"name": "ameerazam08",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 77,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"Chunte",
"ameerazam08",
"ameerazampixis"
],
"count": 3
}
] | 2024-04-26T22:07:30.000Z | 2024-04-26T22:10:24.848Z | [] | /posts/ameerazam08/946832022299198 | 4,036 | 0 |
125379785468349 | [
{
"type": "text",
"value": "Yay we got 500K+ monthly HF downloads on our Unsloth HF repo! :) Super appreciate everyone in the OSS community - and thanks for using Unsloth!!",
"raw": "Yay we got 500K+ monthly HF downloads on our Unsloth HF repo! :) Super appreciate everyone in the OSS community - and thanks for using Unsloth!!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Yay we got 500K+ monthly HF downloads on our Unsloth HF repo! :) Super appreciate everyone in the OSS community - and thanks for using Unsloth!! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62ecdc18b72a69615d6bd857/ixLCk0TwaCVyL_nAfrgEs.png",
"fullname": "Daniel Han-Chen",
"name": "danielhanchen",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 193,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/62ecdc18b72a69615d6bd857/Cwnm4lq7DtNq_LbdJUpF3.jpeg"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"clem",
"giux78",
"lunarflu",
"nbroad",
"alielfilali01",
"Kukedlc",
"Oscar007",
"Dlbk",
"Chunte",
"catastropiyush",
"AtAndDev",
"julien-c",
"duxx",
"damerajee",
"feeltheAGI",
"beomi"
],
"count": 16
},
{
"reaction": "❤️",
"users": [
"clem",
"lunarflu",
"fredericmenezes",
"samusenps",
"nbroad",
"alielfilali01",
"Kukedlc",
"Chunte",
"AtAndDev",
"f0ster",
"julien-c",
"louisbrulenaudet",
"damerajee",
"aloobun",
"beomi"
],
"count": 15
},
{
"reaction": "🤗",
"users": [
"Kukedlc",
"Oscar007",
"AtAndDev",
"damerajee",
"Ji-Xiang",
"beomi"
],
"count": 6
}
] | 2024-04-26T18:44:49.000Z | 2024-04-27T17:30:31.925Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg",
"fullname": "Clem 🤗",
"name": "clem",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 1763,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1639773384591-5f353bb37e58354338621655.jpeg",
"fullname": "Nicholas Broad",
"name": "nbroad",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 92,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/62ecdc18b72a69615d6bd857/ixLCk0TwaCVyL_nAfrgEs.png",
"fullname": "Daniel Han-Chen",
"name": "danielhanchen",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 193,
"isFollowing": false
}
] | /posts/danielhanchen/125379785468349 | 3,609 | 4 |
756454165688036 | [
{
"type": "mention",
"value": null,
"raw": "@clem",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "clem",
"label": null,
"lang": null
},
{
"type": "text",
"value": " Happy to be HF'er :) ",
"raw": " Happy to be HF'er :) ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | @clem Happy to be HF'er :) | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f5c73e9c81260ff87a3b4/w1ej8dhwV02F6tBfvP3zx.jpeg",
"fullname": "zaur samedov",
"name": "zaursamedov1",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 19,
"isFollowing": false
} | [] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg",
"fullname": "Clem 🤗",
"name": "clem",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 1763
}
] | [
{
"reaction": "❤️",
"users": [
"clem",
"lunarflu",
"samusenps",
"Dlbk",
"julien-c",
"not-lain",
"zaursamedov1",
"GPT007"
],
"count": 8
}
] | 2024-04-26T17:58:40.000Z | 2024-04-26T17:58:54.056Z | [] | /posts/zaursamedov1/756454165688036 | 2,194 | 0 |
Subsets and Splits