machineuser committed on
Commit badbf33
1 Parent(s): bdf492b

Sync widgets demo

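The change in every file below is the same one-line rename: the huggingface.js snippets now read the access token from `HF_TOKEN` instead of `HF_ACCESS_TOKEN`. A minimal sketch of the updated pattern, assuming the token is exposed as an `HF_TOKEN` environment variable in Node.js; the env lookup and the result logging are illustrative additions, not part of this commit:

```javascript
// Sketch of the renamed pattern, not part of this commit.
// Assumes a Hugging Face access token is available as the HF_TOKEN
// environment variable (e.g. when running under Node.js 18+).
import { HfInference } from "@huggingface/inference";

const inference = new HfInference(process.env.HF_TOKEN);

// Same audio-classification call as in the first snippet below,
// completed so it runs end to end; model and sample file are from the diff.
const result = await inference.audioClassification({
	data: await (await fetch("sample.flac")).blob(),
	model: "facebook/mms-lid-126",
});
console.log(result);
```

Only the constructor argument changes; each task-specific call keeps its existing shape.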
packages/tasks/src/tasks/audio-classification/about.md CHANGED
@@ -53,7 +53,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.audioClassification({
  data: await (await fetch("sample.flac")).blob(),
  model: "facebook/mms-lid-126",
packages/tasks/src/tasks/audio-to-audio/about.md CHANGED
@@ -35,7 +35,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.audioToAudio({
  data: await (await fetch("sample.flac")).blob(),
  model: "speechbrain/sepformer-wham",
packages/tasks/src/tasks/automatic-speech-recognition/about.md CHANGED
@@ -54,7 +54,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to t
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.automaticSpeechRecognition({
  data: await (await fetch("sample.flac")).blob(),
  model: "openai/whisper-large-v2",
packages/tasks/src/tasks/conversational/about.md CHANGED
@@ -34,7 +34,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.conversational({
  model: "facebook/blenderbot-400M-distill",
  inputs: "Going to the movies tonight - any suggestions?",
packages/tasks/src/tasks/image-classification/about.md CHANGED
@@ -29,7 +29,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to c
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.imageClassification({
  data: await (await fetch("https://picsum.photos/300/300")).blob(),
  model: "microsoft/resnet-50",
packages/tasks/src/tasks/image-segmentation/about.md CHANGED
@@ -45,7 +45,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.imageSegmentation({
  data: await (await fetch("https://picsum.photos/300/300")).blob(),
  model: "facebook/detr-resnet-50-panoptic",
packages/tasks/src/tasks/image-to-image/about.md CHANGED
@@ -43,7 +43,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.imageToImage({
  data: await (await fetch("image")).blob(),
  model: "timbrooks/instruct-pix2pix",
packages/tasks/src/tasks/image-to-text/about.md CHANGED
@@ -48,7 +48,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.imageToText({
  data: await (await fetch("https://picsum.photos/300/300")).blob(),
  model: "Salesforce/blip-image-captioning-base",
packages/tasks/src/tasks/summarization/about.md CHANGED
@@ -25,7 +25,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  const inputs =
  "Paris is the capital and most populous city of France, with an estimated population of 2,175,601 residents as of 2018, in an area of more than 105 square kilometres (41 square miles). The City of Paris is the centre and seat of government of the region and province of Île-de-France, or Paris Region, which has an estimated population of 12,174,880, or about 18 percent of the population of France as of 2017.";

packages/tasks/src/tasks/text-classification/about.md CHANGED
@@ -117,7 +117,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.conversational({
  model: "distilbert-base-uncased-finetuned-sst-2-english",
  inputs: "I love this movie!",
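The snippet on this page passes a sentiment model to `conversational`. For reference, a text-classification call with the renamed token might look like the sketch below; the `textClassification` method, the env-based token lookup, and the logged output shape are illustrative assumptions and are not touched by this commit.

```javascript
// Illustrative sketch only; the commit changes just the token name.
// textClassification is shown here as an assumed alternative to the
// conversational call used in the page's snippet.
import { HfInference } from "@huggingface/inference";

const inference = new HfInference(process.env.HF_TOKEN);

const labels = await inference.textClassification({
	model: "distilbert-base-uncased-finetuned-sst-2-english",
	inputs: "I love this movie!",
});
console.log(labels); // e.g. [{ label: "POSITIVE", score: ... }]
```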
packages/tasks/src/tasks/text-generation/about.md CHANGED
@@ -72,7 +72,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.conversational({
  model: "distilbert-base-uncased-finetuned-sst-2-english",
  inputs: "I love this movie!",
packages/tasks/src/tasks/text-to-image/about.md CHANGED
@@ -41,7 +41,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.textToImage({
  model: "stabilityai/stable-diffusion-2",
  inputs: "award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
packages/tasks/src/tasks/text-to-speech/about.md CHANGED
@@ -47,7 +47,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.textToSpeech({
  model: "facebook/mms-tts",
  inputs: "text to generate speech from",
packages/tasks/src/tasks/translation/about.md CHANGED
@@ -37,7 +37,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
  ```javascript
  import { HfInference } from "@huggingface/inference";

- const inference = new HfInference(HF_ACCESS_TOKEN);
+ const inference = new HfInference(HF_TOKEN);
  await inference.translation({
  model: "t5-base",
  inputs: "My name is Wolfgang and I live in Berlin",
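The text-input tasks follow the same pattern after the rename. A sketch completing the translation snippet above, with the token lookup and result handling added purely for illustration:

```javascript
// Sketch completing the translation snippet with the renamed token.
// The process.env.HF_TOKEN source and the console.log are illustrative
// additions, not part of this commit.
import { HfInference } from "@huggingface/inference";

const inference = new HfInference(process.env.HF_TOKEN);

const translation = await inference.translation({
	model: "t5-base",
	inputs: "My name is Wolfgang and I live in Berlin",
});
console.log(translation); // e.g. { translation_text: "..." }
```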