ok done
- docs/components/timeline.js +1 -1
- docs/components/tree.js +15 -0
- docs/data/events.json +0 -8
- docs/data/events.json.js +19 -0
- docs/data/tags.json.js +27 -0
- docs/index.md +14 -0
docs/components/timeline.js CHANGED

```diff
@@ -10,7 +10,7 @@ export function timeline(events, {width, height} = {}) {
     marks: [
       Plot.ruleX(events, {x: "year", y: "y", markerEnd: "dot", strokeWidth: 2.5}),
       Plot.ruleY([0]),
-      Plot.text(events, {x: "year", y: "y", text: "name", lineAnchor: "bottom", dy: -10, lineWidth: 10, fontSize: 12})
+      // Plot.text(events, {x: "year", y: "y", text: "name", lineAnchor: "bottom", dy: -10, lineWidth: 10, fontSize: 12})
     ]
   });
 }
```
docs/components/tree.js ADDED

```diff
@@ -0,0 +1,15 @@
+import * as Plot from "npm:@observablehq/plot";
+
+export function tree(tags, {width, height} = {}) {
+  return Plot.plot({
+    axis: null,
+    width,
+    height,
+    margin: 10,
+    marginLeft: 40,
+    marginRight: 120,
+    marks: [
+      Plot.tree(tags, {textStroke: "white"})
+    ]
+  });
+}
```
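A note on the input format (not part of the commit): Plot.tree's default path accessor treats each datum as a slash-delimited string, so this component can be handed an array of plain strings. A minimal sketch of using it from a Framework page, assuming the built-in `display` helper is available and the `npm:` import specifier resolves there:

```js
// Sketch only, not in the commit: render the new tree component from a few
// hand-written slash-delimited paths (the form Plot.tree parses by default).
import {tree} from "./components/tree.js";

display(tree(["Chaos/Gaia/Mountains", "Chaos/Gaia/Pontus", "Chaos/Eros"], {height: 240}));
```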
docs/data/events.json DELETED

```diff
@@ -1,8 +0,0 @@
-[
-  {"name": "Sputnik 1", "year": 1957, "y": 10},
-  {"name": "Apollo 11", "year": 1969, "y": 20},
-  {"name": "Viking 1 and 2", "year": 1975, "y": 30},
-  {"name": "Space Shuttle Columbia", "year": 1981, "y": 40},
-  {"name": "Hubble Space Telescope", "year": 1990, "y": 50},
-  {"name": "ISS Construction", "year": 1998, "y": 60}
-]
```
docs/data/events.json.js ADDED

```diff
@@ -0,0 +1,19 @@
+async function json() {
+  // const response = await fetch(url);
+  // if (!response.ok) throw new Error(`fetch failed: ${response.status}`);
+  // return await response.json();
+  return [
+    {"name": "Sputnik 1", "year": 2020, "y": 1},
+    {"name": "Apollo 11", "year": 2021, "y": 2},
+    {"name": "Viking 1 and 2", "year": 2022, "y": 4},
+    {"name": "Space Shuttle Columbia", "year": 2023, "y": 8},
+    {"name": "Hubble Space Telescope", "year": 2024, "y": 16},
+    {"name": "ISS Construction", "year": 2025, "y": 32}
+  ];
+}
+
+
+
+const graph = await json();
+
+process.stdout.write(JSON.stringify(graph));
```
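For context (not part of the diff): `docs/data/events.json.js` is an Observable Framework data loader, so whatever it writes to stdout is served as `docs/data/events.json`, which is why the existing `FileAttachment("./data/events.json")` reference in `index.md` keeps working after the static JSON file is deleted. A quick way to sanity-check the loader's output locally, assuming Node 18+ and that the project treats `.js` files as ES modules, is sketched below:

```js
// Sketch only, not in the commit: run the data loader as a child process and
// inspect what it writes to stdout (the bytes Framework would serve as events.json).
import {execFileSync} from "node:child_process";

const out = execFileSync("node", ["docs/data/events.json.js"], {encoding: "utf8"});
const events = JSON.parse(out);
console.log(`${events.length} events; first: ${events[0].name} (${events[0].year})`);
```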
docs/data/tags.json.js ADDED

```diff
@@ -0,0 +1,27 @@
+async function json() {
+  const response = await fetch(`https://huggingface.co/api/models-tags-by-type`);
+  if (!response.ok) {
+    throw new Error(`fetch failed: ${response.status}`);
+  }
+  const tags = await response.json();
+  const x = Object.entries(tags).map(([k, v]) => {
+    return v.slice(0, 30).map(o => `${k}/${o.id}`);
+  }).flat();
+  return x;
+  // console.log(tags);
+  return [
+    "Chaos/Gaia/Mountains",
+    "Chaos/Gaia/Pontus",
+    "Chaos/Gaia/Uranus",
+    "Chaos/Eros",
+    "Chaos/Erebus",
+    "Chaos/Tartarus"
+  ];
+}
+
+
+
+const tags = await json();
+
+process.stdout.write(JSON.stringify(tags));
+
```
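The interesting part of this loader is the flattening step: the Hugging Face response is assumed to be an object keyed by tag type, each value an array of tag objects with an `id`, and the loader turns the first 30 tags of each type into slash-delimited `type/id` paths that `Plot.tree` can parse. A small stand-alone sketch of that transform on hand-written sample data (the sample shape is an assumption, not taken from the API):

```js
// Sketch only, not in the commit: the same flattening as in tags.json.js,
// applied to a hand-written stand-in for the API response.
const sample = {
  pipeline_tag: [{id: "text-classification"}, {id: "translation"}],
  library: [{id: "transformers"}]
};

const paths = Object.entries(sample)
  .map(([type, tags]) => tags.slice(0, 30).map((tag) => `${type}/${tag.id}`))
  .flat();

console.log(paths);
// ["pipeline_tag/text-classification", "pipeline_tag/translation", "library/transformers"]
```

The hardcoded `Chaos/...` list after the early `return x;` is unreachable and appears to be leftover test data from the Plot.tree documentation example.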
docs/index.md CHANGED

````diff
@@ -23,6 +23,20 @@ const events = FileAttachment("./data/events.json").json();
 timeline(events, {height: 300})
 ```
 
+## Model tags
+
+```js
+import {tree} from "./components/tree.js";
+const tags = FileAttachment("./data/tags.json").json();
+```
+
+The data in the timeline above is static, but the data below is fetched dynamically from `https://huggingface.co/api/models-tags-by-type`:
+
+```js
+tree(tags, {height: 10000})
+```
+
+
 ## LLM generated description based on data above
 
 The evolution of open source models and datasets on the Hugging Face Hub reflects a significant shift in the landscape of machine learning and artificial intelligence. Initially, the Hugging Face Hub started as a repository for Natural Language Processing (NLP) models, primarily focusing on the Transformer architecture, which has been pivotal in advancing AI research and applications. The hub was known for hosting models such as BERT, GPT, and their variants, facilitating easy access for researchers and developers to state-of-the-art models.
````