Commit 28e12d0
Author: nsarrazin
Parent: f2c88f7

add support for arbitrary HF endpoints

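In short: when an HF_ENDPOINT value is configured, text generation is routed through that Inference Endpoint via .endpoint(); otherwise the default hosted model is used. A minimal standalone TypeScript sketch of that routing pattern, run as an ES module — the token, endpoint URL, and prompt below are placeholder values, not part of this commit:

import { HfInference } from "@huggingface/inference";

// Placeholder values for illustration only.
const hf = new HfInference("hf_xxx");
const hfEndpoint = ""; // set to an Inference Endpoint URL to override the default

const prompt = "<|user|>" + "Hello there" + "<|end|><|assistant|>";

// Route through the custom endpoint when one is configured,
// otherwise fall back to the default hosted model.
const output = hfEndpoint
  ? await hf.endpoint(hfEndpoint).textGeneration({
      inputs: prompt,
      parameters: { max_new_tokens: 1400 },
    })
  : await hf.textGeneration({
      inputs: prompt,
      model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
      parameters: { max_new_tokens: 900 },
    });

console.log(output.generated_text.slice(prompt.length));

This is the same branch the updated HFLLMCall takes, with max_new_tokens differing between the two paths as in the llm.ts diff below.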
src/lib/agents/llm.ts CHANGED
@@ -1,8 +1,9 @@
 import { get } from "svelte/store";
-import { OPENAI_API_KEY } from "$lib/store";
+import { OPENAI_API_KEY, HF_ENDPOINT } from "$lib/store";
 import { Configuration, OpenAIApi } from "openai";
 import { getInference } from "./getInference";
 import type { LLM } from "$lib/types";
+import type { TextGenerationOutput } from "@huggingface/inference";
 
 async function OpenAILLMCall(prompt: string) {
   const openai = new OpenAIApi(
@@ -21,19 +22,30 @@ async function OpenAILLMCall(prompt: string) {
 }
 
 async function HFLLMCall(prompt: string) {
-  const hf = getInference();
-
-  const formattedPrompt = "<|user|>" + prompt + "<|end|>";
-
-  const generatedOutput = await hf.textGeneration({
-    inputs: formattedPrompt,
-    model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
-    parameters: {
-      max_new_tokens: 512,
-    },
-  });
-
-  const text = generatedOutput.generated_text.slice(formattedPrompt.length);
+  const formattedPrompt = "<|user|>" + prompt + "<|end|><|assistant|>";
+
+  let output: TextGenerationOutput;
+
+  if (!!get(HF_ENDPOINT)) {
+    output = await getInference()
+      .endpoint(get(HF_ENDPOINT))
+      .textGeneration({
+        inputs: formattedPrompt,
+        parameters: {
+          max_new_tokens: 1400,
+        },
+      });
+  } else {
+    output = await getInference().textGeneration({
+      inputs: formattedPrompt,
+      model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
+      parameters: {
+        max_new_tokens: 900,
+      },
+    });
+  }
+
+  const text = output.generated_text.slice(formattedPrompt.length);
 
   return text;
 }
src/lib/components/ApiKeyModal.svelte CHANGED
@@ -1,9 +1,9 @@
 <script lang="ts">
   import { onMount } from "svelte";
-  import { HF_ACCESS_TOKEN, OPENAI_API_KEY } from "$lib/store";
+  import { HF_ACCESS_TOKEN, OPENAI_API_KEY, HF_ENDPOINT } from "$lib/store";
   import { goto } from "$app/navigation";
 
-  let dialogElement: HTMLDialogElement;
+  export let dialogElement: HTMLDialogElement;
 
   onMount(() => {
     if ($HF_ACCESS_TOKEN === "") {
@@ -22,6 +22,7 @@
   } else {
     localStorage.setItem("HF_ACCESS_TOKEN", $HF_ACCESS_TOKEN);
     localStorage.setItem("OPENAI_API_KEY", $OPENAI_API_KEY);
+    localStorage.setItem("HF_ENDPOINT", $HF_ENDPOINT);
     goto("/");
   }
 }}
@@ -42,8 +43,20 @@
     bind:value={$HF_ACCESS_TOKEN}
   />
 </form>
+<form aria-label="HF ENDPOINT" class="form-control">
+  <label for="hf_endpoint" class="label-text pb-2">HF Endpoint</label>
+  <input
+    class="input input-primary"
+    name="hf_endpoint"
+    type="text"
+    placeholder="Leave empty to use the default endpoint"
+    bind:value={$HF_ENDPOINT}
+  />
+</form>
+
+<div class="divider my-0" />
 <form aria-label="OPENAI API" class="form-control">
-  <p class="py-4">
+  <p class="pb-4">
     Optionally you can add your OpenAI key to use it as your LLM.
   </p>
 
src/lib/store.ts CHANGED
@@ -10,7 +10,12 @@ export const OPENAI_API_KEY = writable(
   (browser && localStorage.OPENAI_API_KEY) || ""
 );
 
+export const HF_ENDPOINT = writable(
+  (browser && localStorage.HF_ENDPOINT) || ""
+);
+
 if (browser) {
   HF_ACCESS_TOKEN.subscribe((value) => (localStorage.content = value));
   OPENAI_API_KEY.subscribe((value) => (localStorage.content = value));
+  HF_ENDPOINT.subscribe((value) => (localStorage.content = value));
 }
src/routes/+layout.svelte CHANGED
@@ -4,6 +4,5 @@
 </script>
 
 <div class="p-4 max-w-xl mx-auto">
-  <ApiKeyModal />
   <slot />
 </div>
src/routes/+page.svelte CHANGED
@@ -11,6 +11,7 @@
   import type { LLM } from "$lib/types";
   import { HFLLM } from "$lib/agents/llm";
   import { OPENAI_API_KEY } from "$lib/store";
+  import ApiKeyModal from "$lib/components/ApiKeyModal.svelte";
 
   let prompt =
     "Draw a picture of a cat wearing a top hat. Then caption the picture and read it out loud.";
@@ -36,14 +37,15 @@
 
   const onRun = async (code: string) => {
     messages = [];
+    const callback = (message: string, data: string | Blob | undefined) => {
+      messages = [...messages, { message, data }];
+    };
 
     const wrapperEval = await evalBuilder(
       code,
       tools.filter((el) => selectedTools.includes(el.name)),
      files,
-      (message, data) => {
-        messages = [...messages, { message, data }];
-      }
+      callback
     );
 
     isLoading = true;
@@ -59,12 +61,21 @@
     }
     isLoading = false;
   };
+
+  let dialogElement: HTMLDialogElement;
 </script>
 
+<ApiKeyModal bind:dialogElement />
+
 <div class="flex flex-col space-y-4 max-w-xl">
-  <div class="flex flex-row justify-around">
+  <div class="flex flex-row">
     <LogoHuggingFaceBorderless classNames="text-4xl" />
-    <h1 class="text-3xl font-semibold w-fit mx-auto">Agents.js</h1>
+    <h1 class="text-3xl font-semibold mx-auto">Agents.js</h1>
+    <button
+      class="btn btn-ghost"
+      on:click={() => dialogElement.showModal()}
+      on:keydown={() => dialogElement.showModal()}>API keys</button
+    >
   </div>
 
   {#if $OPENAI_API_KEY !== ""}