jbilcke-hf HF staff committed on
Commit
b022cb9
β€’
1 Parent(s): e015622

release 1.3

Browse files
README.md CHANGED
@@ -123,7 +123,7 @@ LLM_ENGINE="OPENAI"
123
  # default openai api base url is: https://api.openai.com/v1
124
  LLM_OPENAI_API_BASE_URL="A custom OpenAI API Base URL if you have some special privileges"
125
 
126
- LLM_OPENAI_API_MODEL="gpt-3.5-turbo"
127
 
128
  AUTH_OPENAI_API_KEY="Your own OpenAI API Key"
129
  ```
 
123
  # default openai api base url is: https://api.openai.com/v1
124
  LLM_OPENAI_API_BASE_URL="A custom OpenAI API Base URL if you have some special privileges"
125
 
126
+ LLM_OPENAI_API_MODEL="gpt-4-turbo-preview"
127
 
128
  AUTH_OPENAI_API_KEY="Your own OpenAI API Key"
129
  ```
package-lock.json CHANGED
@@ -49,6 +49,7 @@
49
  "openai": "^4.29.2",
50
  "pick": "^0.0.1",
51
  "postcss": "8.4.37",
 
52
  "react": "18.2.0",
53
  "react-circular-progressbar": "^2.1.0",
54
  "react-contenteditable": "^3.3.7",
@@ -3391,6 +3392,14 @@
3391
  }
3392
  }
3393
  },
 
 
 
 
 
 
 
 
3394
  "node_modules/deep-is": {
3395
  "version": "0.1.4",
3396
  "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
@@ -4228,6 +4237,17 @@
4228
  "node": ">=8"
4229
  }
4230
  },
 
 
 
 
 
 
 
 
 
 
 
4231
  "node_modules/find-up": {
4232
  "version": "5.0.0",
4233
  "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
@@ -6072,6 +6092,22 @@
6072
  "node": ">=6"
6073
  }
6074
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6075
  "node_modules/queue-microtask": {
6076
  "version": "1.2.3",
6077
  "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -6697,6 +6733,17 @@
6697
  "node": ">=0.10.0"
6698
  }
6699
  },
 
 
 
 
 
 
 
 
 
 
 
6700
  "node_modules/streamsearch": {
6701
  "version": "1.1.0",
6702
  "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz",
 
49
  "openai": "^4.29.2",
50
  "pick": "^0.0.1",
51
  "postcss": "8.4.37",
52
+ "query-string": "^9.0.0",
53
  "react": "18.2.0",
54
  "react-circular-progressbar": "^2.1.0",
55
  "react-contenteditable": "^3.3.7",
 
3392
  }
3393
  }
3394
  },
3395
+ "node_modules/decode-uri-component": {
3396
+ "version": "0.4.1",
3397
+ "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.4.1.tgz",
3398
+ "integrity": "sha512-+8VxcR21HhTy8nOt6jf20w0c9CADrw1O8d+VZ/YzzCt4bJ3uBjw+D1q2osAB8RnpwwaeYBxy0HyKQxD5JBMuuQ==",
3399
+ "engines": {
3400
+ "node": ">=14.16"
3401
+ }
3402
+ },
3403
  "node_modules/deep-is": {
3404
  "version": "0.1.4",
3405
  "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
 
4237
  "node": ">=8"
4238
  }
4239
  },
4240
+ "node_modules/filter-obj": {
4241
+ "version": "5.1.0",
4242
+ "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-5.1.0.tgz",
4243
+ "integrity": "sha512-qWeTREPoT7I0bifpPUXtxkZJ1XJzxWtfoWWkdVGqa+eCr3SHW/Ocp89o8vLvbUuQnadybJpjOKu4V+RwO6sGng==",
4244
+ "engines": {
4245
+ "node": ">=14.16"
4246
+ },
4247
+ "funding": {
4248
+ "url": "https://github.com/sponsors/sindresorhus"
4249
+ }
4250
+ },
4251
  "node_modules/find-up": {
4252
  "version": "5.0.0",
4253
  "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
 
6092
  "node": ">=6"
6093
  }
6094
  },
6095
+ "node_modules/query-string": {
6096
+ "version": "9.0.0",
6097
+ "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.0.0.tgz",
6098
+ "integrity": "sha512-4EWwcRGsO2H+yzq6ddHcVqkCQ2EFUSfDMEjF8ryp8ReymyZhIuaFRGLomeOQLkrzacMHoyky2HW0Qe30UbzkKw==",
6099
+ "dependencies": {
6100
+ "decode-uri-component": "^0.4.1",
6101
+ "filter-obj": "^5.1.0",
6102
+ "split-on-first": "^3.0.0"
6103
+ },
6104
+ "engines": {
6105
+ "node": ">=18"
6106
+ },
6107
+ "funding": {
6108
+ "url": "https://github.com/sponsors/sindresorhus"
6109
+ }
6110
+ },
6111
  "node_modules/queue-microtask": {
6112
  "version": "1.2.3",
6113
  "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
 
6733
  "node": ">=0.10.0"
6734
  }
6735
  },
6736
+ "node_modules/split-on-first": {
6737
+ "version": "3.0.0",
6738
+ "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-3.0.0.tgz",
6739
+ "integrity": "sha512-qxQJTx2ryR0Dw0ITYyekNQWpz6f8dGd7vffGNflQQ3Iqj9NJ6qiZ7ELpZsJ/QBhIVAiDfXdag3+Gp8RvWa62AA==",
6740
+ "engines": {
6741
+ "node": ">=12"
6742
+ },
6743
+ "funding": {
6744
+ "url": "https://github.com/sponsors/sindresorhus"
6745
+ }
6746
+ },
6747
  "node_modules/streamsearch": {
6748
  "version": "1.1.0",
6749
  "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz",
package.json CHANGED
@@ -50,6 +50,7 @@
50
  "openai": "^4.29.2",
51
  "pick": "^0.0.1",
52
  "postcss": "8.4.37",
 
53
  "react": "18.2.0",
54
  "react-circular-progressbar": "^2.1.0",
55
  "react-contenteditable": "^3.3.7",
 
50
  "openai": "^4.29.2",
51
  "pick": "^0.0.1",
52
  "postcss": "8.4.37",
53
+ "query-string": "^9.0.0",
54
  "react": "18.2.0",
55
  "react-circular-progressbar": "^2.1.0",
56
  "react-contenteditable": "^3.3.7",
src/app/interface/about/index.tsx CHANGED
@@ -8,8 +8,8 @@ import { Login } from "../login"
8
  const APP_NAME = `AI Comic Factory`
9
  const APP_DOMAIN = `aicomicfactory.app`
10
  const APP_URL = `https://aicomicfactory.app`
11
- const APP_VERSION = `1.2`
12
- const APP_RELEASE_DATE = `March 2024`
13
 
14
  const ExternalLink = ({ url, children }: { url: string; children: ReactNode }) => {
15
  return (
@@ -33,7 +33,6 @@ export function About() {
33
  </DialogTrigger>
34
  <DialogContent className="w-full sm:max-w-[500px] md:max-w-[600px] overflow-y-scroll h-[100vh] sm:h-[550px]">
35
  <DialogHeader>
36
- <DialogTitle><ExternalLink url={APP_URL}>{APP_DOMAIN}</ExternalLink> {APP_VERSION}</DialogTitle>
37
  <DialogDescription className="w-full text-center text-2xl font-bold text-stone-700">
38
  <ExternalLink url={APP_URL}>{APP_DOMAIN}</ExternalLink> {APP_VERSION} ({APP_RELEASE_DATE})
39
  </DialogDescription>
 
8
  const APP_NAME = `AI Comic Factory`
9
  const APP_DOMAIN = `aicomicfactory.app`
10
  const APP_URL = `https://aicomicfactory.app`
11
+ const APP_VERSION = `1.3`
12
+ const APP_RELEASE_DATE = `April 2024`
13
 
14
  const ExternalLink = ({ url, children }: { url: string; children: ReactNode }) => {
15
  return (
 
33
  </DialogTrigger>
34
  <DialogContent className="w-full sm:max-w-[500px] md:max-w-[600px] overflow-y-scroll h-[100vh] sm:h-[550px]">
35
  <DialogHeader>
 
36
  <DialogDescription className="w-full text-center text-2xl font-bold text-stone-700">
37
  <ExternalLink url={APP_URL}>{APP_DOMAIN}</ExternalLink> {APP_VERSION} ({APP_RELEASE_DATE})
38
  </DialogDescription>
src/app/interface/auth-wall/index.tsx CHANGED
@@ -2,22 +2,31 @@
2
  import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle, DialogTrigger } from "@/components/ui/dialog"
3
 
4
  import { Login } from "../login"
 
5
 
6
  export function AuthWall({ show }: { show: boolean }) {
7
  return (
8
  <Dialog open={show}>
9
- <DialogContent className="sm:max-w-[425px]">
10
- <div className="grid gap-4 py-4 text-stone-800">
11
  <p className="">
12
- The AI Comic Factory is a free app available to all Hugging Face users!
13
  </p>
14
  <p>
15
- Please sign-in to continue:
 
16
  </p>
17
  <p>
18
  <Login />
19
  </p>
20
- <p>(temporary issue alert: if this doesn&apos;t work for you, please use the button in the About panel)</p>
 
 
 
 
 
 
 
21
  </div>
22
  </DialogContent>
23
  </Dialog>
 
2
  import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle, DialogTrigger } from "@/components/ui/dialog"
3
 
4
  import { Login } from "../login"
5
+ import { SettingsDialog } from "../settings-dialog"
6
 
7
  export function AuthWall({ show }: { show: boolean }) {
8
  return (
9
  <Dialog open={show}>
10
+ <DialogContent className="sm:max-w-[800px]">
11
+ <div className="grid gap-4 py-4 text-stone-800 text-center text-xl">
12
  <p className="">
13
+ The AI Comic Factory is a free app compatible with many vendors.
14
  </p>
15
  <p>
16
+ By default it uses Hugging Face for story and image generation,<br/>
17
+ our service is free of charge but we would like you to sign-in πŸ‘‡
18
  </p>
19
  <p>
20
  <Login />
21
  </p>
22
+ {/*<p>(if login doesn&apos;t work for you, please use the button in the About panel)</p>*/}
23
+ <p className="mt-2 text-lg">
24
+ To hide this message, you can also go in the <SettingsDialog /> to replace<br/>
25
+ both the image and the story providers to use external vendors.
26
+ </p>
27
+ <p className="mt-2 text-base">
28
+ This pop-up will also disappear if you <a className="text-stone-600 underline" href="https://github.com/jbilcke-hf/ai-comic-factory" target="_blank">download the code</a> to run the app at home.
29
+ </p>
30
  </div>
31
  </DialogContent>
32
  </Dialog>
src/app/interface/login/login.tsx CHANGED
@@ -7,7 +7,7 @@ import { useOAuth } from "@/lib/useOAuth"
7
 
8
  function Login() {
9
  const { login } = useOAuth({ debug: false })
10
- return <Button onClick={login}>Sign-in with Hugging Face</Button>
11
  }
12
 
13
  export default Login
 
7
 
8
  function Login() {
9
  const { login } = useOAuth({ debug: false })
10
+ return <Button onClick={login} className="text-xl">Sign-in with Hugging Face</Button>
11
  }
12
 
13
  export default Login
src/app/interface/settings-dialog/defaultSettings.ts CHANGED
@@ -1,8 +1,9 @@
1
- import { RenderingModelVendor, Settings } from "@/types"
2
 
3
  export const defaultSettings: Settings = {
4
  renderingModelVendor: "SERVER" as RenderingModelVendor,
5
  renderingUseTurbo: false,
 
6
  huggingFaceOAuth: "",
7
  huggingfaceApiKey: "",
8
  huggingfaceInferenceApiModel: "stabilityai/stable-diffusion-xl-base-1.0",
@@ -14,9 +15,11 @@ export const defaultSettings: Settings = {
14
  replicateApiModelTrigger: "",
15
  openaiApiKey: "",
16
  openaiApiModel: "dall-e-3",
17
- openaiApiLanguageModel: "gpt-4",
18
  groqApiKey: "",
19
  groqApiLanguageModel: "mixtral-8x7b-32768",
 
 
20
  hasGeneratedAtLeastOnce: false,
21
  userDefinedMaxNumberOfPages: 1,
22
  }
 
1
+ import { LLMVendor, RenderingModelVendor, Settings } from "@/types"
2
 
3
  export const defaultSettings: Settings = {
4
  renderingModelVendor: "SERVER" as RenderingModelVendor,
5
  renderingUseTurbo: false,
6
+ llmVendor: "SERVER" as LLMVendor,
7
  huggingFaceOAuth: "",
8
  huggingfaceApiKey: "",
9
  huggingfaceInferenceApiModel: "stabilityai/stable-diffusion-xl-base-1.0",
 
15
  replicateApiModelTrigger: "",
16
  openaiApiKey: "",
17
  openaiApiModel: "dall-e-3",
18
+ openaiApiLanguageModel: "gpt-4-turbo-preview",
19
  groqApiKey: "",
20
  groqApiLanguageModel: "mixtral-8x7b-32768",
21
+ anthropicApiKey: "",
22
+ anthropicApiLanguageModel: "claude-3-opus-20240229",
23
  hasGeneratedAtLeastOnce: false,
24
  userDefinedMaxNumberOfPages: 1,
25
  }
src/app/interface/settings-dialog/getSettings.ts CHANGED
@@ -1,4 +1,4 @@
1
- import { RenderingModelVendor, Settings } from "@/types"
2
 
3
  import { getValidString } from "@/lib/getValidString"
4
  import { localStorageKeys } from "./localStorageKeys"
@@ -11,6 +11,7 @@ export function getSettings(): Settings {
11
  return {
12
  renderingModelVendor: getValidString(localStorage?.getItem?.(localStorageKeys.renderingModelVendor), defaultSettings.renderingModelVendor) as RenderingModelVendor,
13
  renderingUseTurbo: getValidBoolean(localStorage?.getItem?.(localStorageKeys.renderingUseTurbo), defaultSettings.renderingUseTurbo),
 
14
  huggingFaceOAuth: getValidString(localStorage?.getItem?.(localStorageKeys.huggingFaceOAuth), defaultSettings.huggingFaceOAuth),
15
  huggingfaceApiKey: getValidString(localStorage?.getItem?.(localStorageKeys.huggingfaceApiKey), defaultSettings.huggingfaceApiKey),
16
  huggingfaceInferenceApiModel: getValidString(localStorage?.getItem?.(localStorageKeys.huggingfaceInferenceApiModel), defaultSettings.huggingfaceInferenceApiModel),
@@ -25,6 +26,8 @@ export function getSettings(): Settings {
25
  openaiApiLanguageModel: getValidString(localStorage?.getItem?.(localStorageKeys.openaiApiLanguageModel), defaultSettings.openaiApiLanguageModel),
26
  groqApiKey: getValidString(localStorage?.getItem?.(localStorageKeys.groqApiKey), defaultSettings.groqApiKey),
27
  groqApiLanguageModel: getValidString(localStorage?.getItem?.(localStorageKeys.groqApiLanguageModel), defaultSettings.groqApiLanguageModel),
 
 
28
  hasGeneratedAtLeastOnce: getValidBoolean(localStorage?.getItem?.(localStorageKeys.hasGeneratedAtLeastOnce), defaultSettings.hasGeneratedAtLeastOnce),
29
  userDefinedMaxNumberOfPages: getValidNumber(localStorage?.getItem?.(localStorageKeys.userDefinedMaxNumberOfPages), 1, Number.MAX_SAFE_INTEGER, defaultSettings.userDefinedMaxNumberOfPages),
30
  }
 
1
+ import { LLMVendor, RenderingModelVendor, Settings } from "@/types"
2
 
3
  import { getValidString } from "@/lib/getValidString"
4
  import { localStorageKeys } from "./localStorageKeys"
 
11
  return {
12
  renderingModelVendor: getValidString(localStorage?.getItem?.(localStorageKeys.renderingModelVendor), defaultSettings.renderingModelVendor) as RenderingModelVendor,
13
  renderingUseTurbo: getValidBoolean(localStorage?.getItem?.(localStorageKeys.renderingUseTurbo), defaultSettings.renderingUseTurbo),
14
+ llmVendor: getValidString(localStorage?.getItem?.(localStorageKeys.llmVendor), defaultSettings.llmVendor) as LLMVendor,
15
  huggingFaceOAuth: getValidString(localStorage?.getItem?.(localStorageKeys.huggingFaceOAuth), defaultSettings.huggingFaceOAuth),
16
  huggingfaceApiKey: getValidString(localStorage?.getItem?.(localStorageKeys.huggingfaceApiKey), defaultSettings.huggingfaceApiKey),
17
  huggingfaceInferenceApiModel: getValidString(localStorage?.getItem?.(localStorageKeys.huggingfaceInferenceApiModel), defaultSettings.huggingfaceInferenceApiModel),
 
26
  openaiApiLanguageModel: getValidString(localStorage?.getItem?.(localStorageKeys.openaiApiLanguageModel), defaultSettings.openaiApiLanguageModel),
27
  groqApiKey: getValidString(localStorage?.getItem?.(localStorageKeys.groqApiKey), defaultSettings.groqApiKey),
28
  groqApiLanguageModel: getValidString(localStorage?.getItem?.(localStorageKeys.groqApiLanguageModel), defaultSettings.groqApiLanguageModel),
29
+ anthropicApiKey: getValidString(localStorage?.getItem?.(localStorageKeys.anthropicApiKey), defaultSettings.anthropicApiKey),
30
+ anthropicApiLanguageModel: getValidString(localStorage?.getItem?.(localStorageKeys.anthropicApiLanguageModel), defaultSettings.anthropicApiLanguageModel),
31
  hasGeneratedAtLeastOnce: getValidBoolean(localStorage?.getItem?.(localStorageKeys.hasGeneratedAtLeastOnce), defaultSettings.hasGeneratedAtLeastOnce),
32
  userDefinedMaxNumberOfPages: getValidNumber(localStorage?.getItem?.(localStorageKeys.userDefinedMaxNumberOfPages), 1, Number.MAX_SAFE_INTEGER, defaultSettings.userDefinedMaxNumberOfPages),
33
  }
src/app/interface/settings-dialog/index.tsx CHANGED
@@ -13,7 +13,7 @@ import {
13
  SelectValue,
14
  } from "@/components/ui/select"
15
 
16
- import { RenderingModelVendor } from "@/types"
17
  import { Input } from "@/components/ui/input"
18
 
19
  import { Label } from "./label"
@@ -24,6 +24,8 @@ import { defaultSettings } from "./defaultSettings"
24
  import { useDynamicConfig } from "@/lib/useDynamicConfig"
25
  import { Slider } from "@/components/ui/slider"
26
  import { fonts } from "@/lib/fonts"
 
 
27
 
28
  export function SettingsDialog() {
29
  const [isOpen, setOpen] = useState(false)
@@ -35,6 +37,10 @@ export function SettingsDialog() {
35
  localStorageKeys.renderingUseTurbo,
36
  defaultSettings.renderingUseTurbo
37
  )
 
 
 
 
38
  const [huggingfaceApiKey, setHuggingfaceApiKey] = useLocalStorage<string>(
39
  localStorageKeys.huggingfaceApiKey,
40
  defaultSettings.huggingfaceApiKey
@@ -75,6 +81,26 @@ export function SettingsDialog() {
75
  localStorageKeys.openaiApiModel,
76
  defaultSettings.openaiApiModel
77
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
78
  const [userDefinedMaxNumberOfPages, setUserDefinedMaxNumberOfPages] = useLocalStorage<number>(
79
  localStorageKeys.userDefinedMaxNumberOfPages,
80
  defaultSettings.userDefinedMaxNumberOfPages
@@ -87,19 +113,25 @@ export function SettingsDialog() {
87
  <DialogTrigger asChild>
88
  <Button className="space-x-1 md:space-x-2">
89
  <div>
90
- <span className="hidden md:inline">Settings</span>
91
  </div>
92
  </Button>
93
  </DialogTrigger>
94
- <DialogContent className="w-full sm:max-w-[500px] md:max-w-[700px]">
95
  <DialogHeader>
96
- <DialogDescription className="w-full text-center text-lg font-bold text-stone-800">
97
- Settings
98
  </DialogDescription>
99
  </DialogHeader>
100
  <div className="overflow-y-scroll h-[75vh] md:h-[70vh]">
 
 
 
 
 
 
101
  {isConfigReady && <Field>
102
- <Label>(new!) Control the number of pages: {userDefinedMaxNumberOfPages}</Label>
103
  <Slider
104
  min={1}
105
  max={maxNbPages}
@@ -115,31 +147,11 @@ export function SettingsDialog() {
115
  />
116
  </Field>
117
  }
118
- <div className="grid gap-4 pt-8 pb-1 space-y-1 text-stone-800">
119
- <Field>
120
- <Label>Image rendering provider:</Label>
121
- <p className="pt-2 pb-3 text-base italic text-zinc-600">
122
- ℹ️ Some API vendors have a delay for rarely used models.<br/>
123
- πŸ‘‰ In case of trouble, try again after 5-10 minutes.
124
- </p>
125
-
126
- <Select
127
- onValueChange={(value: string) => {
128
- setRenderingModelVendor(value as RenderingModelVendor)
129
- }}
130
- defaultValue={renderingModelVendor}>
131
- <SelectTrigger className="">
132
- <SelectValue placeholder="Theme" />
133
- </SelectTrigger>
134
- <SelectContent>
135
- <SelectItem value="SERVER">Use server settings (default)</SelectItem>
136
- <SelectItem value="HUGGINGFACE">Custom Hugging Face model (recommended)</SelectItem>
137
- <SelectItem value="REPLICATE">Custom Replicate model (will use your own account)</SelectItem>
138
- <SelectItem value="OPENAI">DALLΒ·E 3 by OpenAI (partial support, will use your own account)</SelectItem>
139
- </SelectContent>
140
- </Select>
141
- </Field>
142
-
143
 
144
  {
145
  // renderingModelVendor === "SERVER" && <>
@@ -168,6 +180,29 @@ export function SettingsDialog() {
168
  // </>
169
  }
170
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
171
  {renderingModelVendor === "HUGGINGFACE" && <>
172
  <Field>
173
  <Label>Hugging Face API Token (<a className="text-stone-600 underline" href="https://huggingface.co/subscribe/pro" target="_blank">PRO account</a> recommended for higher rate limit):</Label>
@@ -247,7 +282,7 @@ export function SettingsDialog() {
247
 
248
  {renderingModelVendor === "REPLICATE" && <>
249
  <Field>
250
- <Label>Replicate API Token (you will be billed based on Replicate pricing):</Label>
251
  <Input
252
  className={fonts.actionman.className}
253
  type="password"
@@ -296,10 +331,112 @@ export function SettingsDialog() {
296
  </Field>
297
  </>}
298
 
299
- <p className="text-sm text-zinc-700 italic">
300
- πŸ”’ Settings such as API keys are stored inside your browser and aren&apos;t kept on our servers.
301
- </p>
302
- </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
303
 
304
  </div>
305
 
 
13
  SelectValue,
14
  } from "@/components/ui/select"
15
 
16
+ import { LLMVendor, RenderingModelVendor } from "@/types"
17
  import { Input } from "@/components/ui/input"
18
 
19
  import { Label } from "./label"
 
24
  import { useDynamicConfig } from "@/lib/useDynamicConfig"
25
  import { Slider } from "@/components/ui/slider"
26
  import { fonts } from "@/lib/fonts"
27
+ import { cn } from "@/lib/utils"
28
+ import { SectionTitle } from "./section-title"
29
 
30
  export function SettingsDialog() {
31
  const [isOpen, setOpen] = useState(false)
 
37
  localStorageKeys.renderingUseTurbo,
38
  defaultSettings.renderingUseTurbo
39
  )
40
+ const [llmVendor, setLlmModelVendor] = useLocalStorage<LLMVendor>(
41
+ localStorageKeys.llmVendor,
42
+ defaultSettings.llmVendor
43
+ )
44
  const [huggingfaceApiKey, setHuggingfaceApiKey] = useLocalStorage<string>(
45
  localStorageKeys.huggingfaceApiKey,
46
  defaultSettings.huggingfaceApiKey
 
81
  localStorageKeys.openaiApiModel,
82
  defaultSettings.openaiApiModel
83
  )
84
+ const [openaiApiLanguageModel, setOpenaiApiLanguageModel] = useLocalStorage<string>(
85
+ localStorageKeys.openaiApiLanguageModel,
86
+ defaultSettings.openaiApiLanguageModel
87
+ )
88
+ const [groqApiKey, setGroqApiKey] = useLocalStorage<string>(
89
+ localStorageKeys.groqApiKey,
90
+ defaultSettings.groqApiKey
91
+ )
92
+ const [groqApiLanguageModel, setGroqApiLanguageModel] = useLocalStorage<string>(
93
+ localStorageKeys.groqApiLanguageModel,
94
+ defaultSettings.groqApiLanguageModel
95
+ )
96
+ const [anthropicApiKey, setAnthropicApiKey] = useLocalStorage<string>(
97
+ localStorageKeys.anthropicApiKey,
98
+ defaultSettings.anthropicApiKey
99
+ )
100
+ const [anthropicApiLanguageModel, setAnthropicApiLanguageModel] = useLocalStorage<string>(
101
+ localStorageKeys.anthropicApiLanguageModel,
102
+ defaultSettings.anthropicApiLanguageModel
103
+ )
104
  const [userDefinedMaxNumberOfPages, setUserDefinedMaxNumberOfPages] = useLocalStorage<number>(
105
  localStorageKeys.userDefinedMaxNumberOfPages,
106
  defaultSettings.userDefinedMaxNumberOfPages
 
113
  <DialogTrigger asChild>
114
  <Button className="space-x-1 md:space-x-2">
115
  <div>
116
+ <span className="">Settings</span>
117
  </div>
118
  </Button>
119
  </DialogTrigger>
120
+ <DialogContent className="w-full sm:max-w-[500px] md:max-w-[700px] bg-gray-100">
121
  <DialogHeader>
122
+ <DialogDescription className="w-full text-center text-2xl font-bold text-stone-800">
123
+ AI Comic Factory Settings
124
  </DialogDescription>
125
  </DialogHeader>
126
  <div className="overflow-y-scroll h-[75vh] md:h-[70vh]">
127
+ <p className="text-base italic text-zinc-600 w-full text-center">
128
+ ℹ️ Some models can take time to cold-start, or be under heavy traffic.<br/>
129
+ πŸ‘‰ In case of trouble, try again after 5-10 minutes.<br/>
130
+ πŸ”’ Your settings are stored inside your browser, not on our servers.
131
+ </p>
132
+ <SectionTitle>πŸ‘‡ General options</SectionTitle>
133
  {isConfigReady && <Field>
134
+ <Label className="pt-2">Move the slider to set the total expected number of pages: {userDefinedMaxNumberOfPages}</Label>
135
  <Slider
136
  min={1}
137
  max={maxNbPages}
 
147
  />
148
  </Field>
149
  }
150
+ <div className={cn(
151
+ `grid gap-2 pt-3 pb-1`,
152
+ `text-stone-800`
153
+ )}>
154
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
 
156
  {
157
  // renderingModelVendor === "SERVER" && <>
 
180
  // </>
181
  }
182
 
183
+ <SectionTitle>πŸ‘‡ Panel rendering options</SectionTitle>
184
+
185
+ <Field>
186
+ <Label className={cn(
187
+ )}>Image generation - please choose a stable diffusion provider:</Label>
188
+ <Select
189
+ onValueChange={(value: string) => {
190
+ setRenderingModelVendor(value as RenderingModelVendor)
191
+ }}
192
+ defaultValue={renderingModelVendor}
193
+ value={renderingModelVendor}>
194
+ <SelectTrigger className="bg-white">
195
+ <SelectValue />
196
+ </SelectTrigger>
197
+ <SelectContent>
198
+ <SelectItem value="SERVER">Default Hugging Face server (free but limited capacity, not always online)</SelectItem>
199
+ <SelectItem value="HUGGINGFACE">Custom Inference API model (pro hugging face account recommended)</SelectItem>
200
+ <SelectItem value="REPLICATE">Custom Replicate model (will bill your own account)</SelectItem>
201
+ <SelectItem value="OPENAI">DALLΒ·E 3 by OpenAI (partial support, will bill your own account)</SelectItem>
202
+ </SelectContent>
203
+ </Select>
204
+ </Field>
205
+
206
  {renderingModelVendor === "HUGGINGFACE" && <>
207
  <Field>
208
  <Label>Hugging Face API Token (<a className="text-stone-600 underline" href="https://huggingface.co/subscribe/pro" target="_blank">PRO account</a> recommended for higher rate limit):</Label>
 
282
 
283
  {renderingModelVendor === "REPLICATE" && <>
284
  <Field>
285
+ <Label>Replicate API Token:</Label>
286
  <Input
287
  className={fonts.actionman.className}
288
  type="password"
 
331
  </Field>
332
  </>}
333
 
334
+ <SectionTitle>πŸ‘‡ Story generation options (🚧 experimental alpbetaha 🚧)</SectionTitle>
335
+
336
+ <p>⚠️ I haven&apos;t tested all vendors yet, so please report issues to Discord!<br/>
337
+ ⚠️ Billing and privacy depend on your preferred vendor so please exercise caution.</p>
338
+ <Field>
339
+ <Label className={cn(
340
+ )}>Story generation - please choose a LLM provider:</Label>
341
+ <Select
342
+ onValueChange={(value: string) => {
343
+ setLlmModelVendor(value as LLMVendor)
344
+ }}
345
+ defaultValue={llmVendor}
346
+ value={llmVendor}>
347
+ <SelectTrigger className="bg-white">
348
+ <SelectValue />
349
+ </SelectTrigger>
350
+ <SelectContent>
351
+ <SelectItem value="SERVER">Default Hugging Face server (free but limited capacity, not always online)</SelectItem>
352
+ <SelectItem value="GROQ">Open-source models on Groq (will bill your own account)</SelectItem>
353
+ <SelectItem value="ANTHROPIC">Claude by Anthropic (will bill your own account)</SelectItem>
354
+ <SelectItem value="OPENAI">ChatGPT by OpenAI (will bill your own account)</SelectItem>
355
+ </SelectContent>
356
+ </Select>
357
+ </Field>
358
+
359
+ {llmVendor === "GROQ" && <>
360
+ <Field>
361
+ <Label>Groq API Token:</Label>
362
+ <Input
363
+ className={fonts.actionman.className}
364
+ type="password"
365
+ placeholder="Enter your private api token"
366
+ onChange={(x) => {
367
+ setGroqApiKey(x.target.value)
368
+ }}
369
+ value={groqApiKey}
370
+ />
371
+ </Field>
372
+ <Field>
373
+ <Label>Open-source Model ID:</Label>
374
+ <Input
375
+ className={fonts.actionman.className}
376
+ placeholder="Name of the LLM"
377
+ onChange={(x) => {
378
+ setGroqApiLanguageModel(x.target.value)
379
+ }}
380
+ value={groqApiLanguageModel}
381
+ />
382
+ </Field>
383
+ </>}
384
+
385
+
386
+ {llmVendor === "ANTHROPIC" && <>
387
+ <Field>
388
+ <Label>Anthropic API Token:</Label>
389
+ <Input
390
+ className={fonts.actionman.className}
391
+ type="password"
392
+ placeholder="Enter your private api token"
393
+ onChange={(x) => {
394
+ setAnthropicApiKey(x.target.value)
395
+ }}
396
+ value={anthropicApiKey}
397
+ />
398
+ </Field>
399
+ <Field>
400
+ <Label>Proprietary Model ID:</Label>
401
+ <Input
402
+ className={fonts.actionman.className}
403
+ placeholder="Name of the LLM"
404
+ onChange={(x) => {
405
+ setAnthropicApiLanguageModel(x.target.value)
406
+ }}
407
+ value={anthropicApiLanguageModel}
408
+ />
409
+ </Field>
410
+ </>}
411
+
412
+
413
+ {llmVendor === "OPENAI" && <>
414
+ <Field>
415
+ <Label>OpenAI API Token:</Label>
416
+ <Input
417
+ className={fonts.actionman.className}
418
+ type="password"
419
+ placeholder="Enter your private api token"
420
+ onChange={(x) => {
421
+ setOpenaiApiKey(x.target.value)
422
+ }}
423
+ value={openaiApiKey}
424
+ />
425
+ </Field>
426
+ <Field>
427
+ <Label>Proprietary Model ID:</Label>
428
+ <Input
429
+ className={fonts.actionman.className}
430
+ placeholder="Name of the LLM"
431
+ onChange={(x) => {
432
+ setOpenaiApiLanguageModel(x.target.value)
433
+ }}
434
+ value={openaiApiLanguageModel}
435
+ />
436
+ </Field>
437
+ </>}
438
+
439
+ </div>
440
 
441
  </div>
442
 
src/app/interface/settings-dialog/label.tsx CHANGED
@@ -1,7 +1,15 @@
1
  import { ReactNode } from "react"
2
 
3
- export function Label({ children }: { children: ReactNode }) {
 
 
 
 
 
4
  return (
5
- <label className="text-xl font-semibold text-zinc-700">{children}</label>
 
 
 
6
  )
7
  }
 
1
  import { ReactNode } from "react"
2
 
3
+ import { cn } from "@/lib/utils"
4
+
5
+ export function Label({ className, children }: {
6
+ className?: string
7
+ children: ReactNode
8
+ }) {
9
  return (
10
+ <label className={cn(
11
+ `text-base font-semibold text-zinc-700`,
12
+ className
13
+ )}>{children}</label>
14
  )
15
  }
src/app/interface/settings-dialog/localStorageKeys.ts CHANGED
@@ -1,22 +1,29 @@
1
  import { Settings } from "@/types"
2
 
 
 
 
 
3
  export const localStorageKeys: Record<keyof Settings, string> = {
4
- renderingModelVendor: "CONF_RENDERING_MODEL_VENDOR",
5
- renderingUseTurbo: "CONF_RENDERING_USE_TURBO",
6
- huggingFaceOAuth: "CONF_AUTH_HF_OAUTH",
7
- huggingfaceApiKey: "CONF_AUTH_HF_API_TOKEN",
8
- huggingfaceInferenceApiModel: "CONF_RENDERING_HF_INFERENCE_API_BASE_MODEL",
9
- huggingfaceInferenceApiModelTrigger: "CONF_RENDERING_HF_INFERENCE_API_BASE_MODEL_TRIGGER",
10
- huggingfaceInferenceApiFileType: "CONF_RENDERING_HF_INFERENCE_API_FILE_TYPE",
11
- replicateApiKey: "CONF_AUTH_REPLICATE_API_TOKEN",
12
- replicateApiModel: "CONF_RENDERING_REPLICATE_API_MODEL",
13
- replicateApiModelVersion: "CONF_RENDERING_REPLICATE_API_MODEL_VERSION",
14
- replicateApiModelTrigger: "CONF_RENDERING_REPLICATE_API_MODEL_TRIGGER",
15
- openaiApiKey: "CONF_AUTH_OPENAI_API_KEY",
16
- openaiApiModel: "CONF_AUTH_OPENAI_API_MODEL",
17
- openaiApiLanguageModel: "CONF_AUTH_OPENAI_API_LANGUAGE_MODEL",
18
- groqApiKey: "CONF_AUTH_GROQ_API_KEY",
19
- groqApiLanguageModel: "CONF_AUTH_GROQ_API_LANGUAGE_MODEL",
20
- hasGeneratedAtLeastOnce: "CONF_HAS_GENERATED_AT_LEAST_ONCE",
21
- userDefinedMaxNumberOfPages: "CONF_USER_DEFINED_MAX_NUMBER_OF_PAGES"
 
 
 
22
  }
 
1
  import { Settings } from "@/types"
2
 
3
+ // let's keep it "version 0" for now, so as to not disrupt current users
4
+ // however at some point we might need to upgrade and invalidate the default values
5
+ const version = ``
6
+
7
  export const localStorageKeys: Record<keyof Settings, string> = {
8
+ renderingModelVendor: `${version}CONF_RENDERING_MODEL_VENDOR`,
9
+ renderingUseTurbo: `${version}CONF_RENDERING_USE_TURBO`,
10
+ llmVendor: `${version}CONF_LLM_MODEL_VENDOR`,
11
+ huggingFaceOAuth: `${version}CONF_AUTH_HF_OAUTH`,
12
+ huggingfaceApiKey: `${version}CONF_AUTH_HF_API_TOKEN`,
13
+ huggingfaceInferenceApiModel: `${version}CONF_RENDERING_HF_INFERENCE_API_BASE_MODEL`,
14
+ huggingfaceInferenceApiModelTrigger: `${version}CONF_RENDERING_HF_INFERENCE_API_BASE_MODEL_TRIGGER`,
15
+ huggingfaceInferenceApiFileType: `${version}CONF_RENDERING_HF_INFERENCE_API_FILE_TYPE`,
16
+ replicateApiKey: `${version}CONF_AUTH_REPLICATE_API_TOKEN`,
17
+ replicateApiModel: `${version}CONF_RENDERING_REPLICATE_API_MODEL`,
18
+ replicateApiModelVersion: `${version}CONF_RENDERING_REPLICATE_API_MODEL_VERSION`,
19
+ replicateApiModelTrigger: `${version}CONF_RENDERING_REPLICATE_API_MODEL_TRIGGER`,
20
+ openaiApiKey: `${version}CONF_AUTH_OPENAI_API_KEY`,
21
+ openaiApiModel: `${version}CONF_AUTH_OPENAI_API_MODEL`,
22
+ openaiApiLanguageModel: `${version}CONF_AUTH_OPENAI_API_LANGUAGE_MODEL`,
23
+ groqApiKey: `${version}CONF_AUTH_GROQ_API_KEY`,
24
+ groqApiLanguageModel: `${version}CONF_AUTH_GROQ_API_LANGUAGE_MODEL`,
25
+ anthropicApiKey: `${version}CONF_AUTH_ANTHROPIC_API_KEY`,
26
+ anthropicApiLanguageModel: `${version}CONF_AUTH_ANTHROPIC_API_LANGUAGE_MODEL`,
27
+ hasGeneratedAtLeastOnce: `${version}CONF_HAS_GENERATED_AT_LEAST_ONCE`,
28
+ userDefinedMaxNumberOfPages: `${version}CONF_USER_DEFINED_MAX_NUMBER_OF_PAGES`,
29
  }
src/app/interface/settings-dialog/section-title.tsx ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { ReactNode } from "react"
2
+
3
+ import { cn } from "@/lib/utils"
4
+
5
+ export function SectionTitle({ className, children }: {
6
+ className?: string
7
+ children: ReactNode
8
+ }) {
9
+ return (
10
+ <div className={cn(
11
+ `flex flex-col items-center justify-center`,
12
+ `mt-6 pt-3 pb w-full`,
13
+ `border-t border-t-stone-400`,
14
+ `text-xl font-semibold text-zinc-900`,
15
+ className
16
+ )}>
17
+ {children}
18
+ </div>
19
+ )
20
+ }
src/app/interface/top-menu/index.tsx CHANGED
@@ -41,6 +41,14 @@ const layoutIcons: Partial<Record<LayoutName, StaticImageData>> = {
41
  }
42
 
43
  export function TopMenu() {
 
 
 
 
 
 
 
 
44
  // const font = useStore(state => state.font)
45
  // const setFont = useStore(state => state.setFont)
46
  const preset = useStore(state => state.preset)
@@ -63,21 +71,14 @@ export function TopMenu() {
63
 
64
  const [lastDraftPromptA, setLastDraftPromptA] = useLocalStorage<string>(
65
  "AI_COMIC_FACTORY_LAST_DRAFT_PROMPT_A",
66
- ""
67
  )
68
 
69
  const [lastDraftPromptB, setLastDraftPromptB] = useLocalStorage<string>(
70
  "AI_COMIC_FACTORY_LAST_DRAFT_PROMPT_B",
71
- ""
72
  )
73
 
74
- const searchParams = useSearchParams()
75
-
76
- const requestedPreset = (searchParams?.get('preset') as PresetName) || defaultPreset
77
- const requestedFont = (searchParams?.get('font') as FontName) || defaultFont
78
- const requestedPrompt = (searchParams?.get('prompt') as string) || ""
79
- const requestedLayout = (searchParams?.get('layout') as LayoutName) || defaultLayout
80
-
81
  const [draftPromptA, setDraftPromptA] = useState(lastDraftPromptA)
82
  const [draftPromptB, setDraftPromptB] = useState(lastDraftPromptB)
83
  const draftPrompt = `${draftPromptA}||${draftPromptB}`
 
41
  }
42
 
43
  export function TopMenu() {
44
+ const searchParams = useSearchParams()
45
+
46
+ const requestedPreset = (searchParams?.get('preset') as PresetName) || defaultPreset
47
+ const requestedFont = (searchParams?.get('font') as FontName) || defaultFont
48
+ const requestedStylePrompt = (searchParams?.get('stylePrompt') as string) || ""
49
+ const requestedStoryPrompt = (searchParams?.get('storyPrompt') as string) || ""
50
+ const requestedLayout = (searchParams?.get('layout') as LayoutName) || defaultLayout
51
+
52
  // const font = useStore(state => state.font)
53
  // const setFont = useStore(state => state.setFont)
54
  const preset = useStore(state => state.preset)
 
71
 
72
  const [lastDraftPromptA, setLastDraftPromptA] = useLocalStorage<string>(
73
  "AI_COMIC_FACTORY_LAST_DRAFT_PROMPT_A",
74
+ requestedStylePrompt
75
  )
76
 
77
  const [lastDraftPromptB, setLastDraftPromptB] = useLocalStorage<string>(
78
  "AI_COMIC_FACTORY_LAST_DRAFT_PROMPT_B",
79
+ requestedStoryPrompt
80
  )
81
 
 
 
 
 
 
 
 
82
  const [draftPromptA, setDraftPromptA] = useState(lastDraftPromptA)
83
  const [draftPromptB, setDraftPromptB] = useState(lastDraftPromptB)
84
  const draftPrompt = `${draftPromptA}||${draftPromptB}`
src/app/main.tsx CHANGED
@@ -19,11 +19,12 @@ import { getStoryContinuation } from "./queries/getStoryContinuation"
19
  import { localStorageKeys } from "./interface/settings-dialog/localStorageKeys"
20
  import { defaultSettings } from "./interface/settings-dialog/defaultSettings"
21
  import { SignUpCTA } from "./interface/sign-up-cta"
22
- import { sleep } from "@/lib/sleep"
23
 
24
  export default function Main() {
25
  const [_isPending, startTransition] = useTransition()
26
 
 
27
  const { config, isConfigReady } = useDynamicConfig()
28
  const isGeneratingStory = useStore(s => s.isGeneratingStory)
29
  const setGeneratingStory = useStore(s => s.setGeneratingStory)
@@ -89,7 +90,7 @@ export default function Main() {
89
  showNextPageButton
90
  }, null, 2))
91
  */
92
-
93
  useEffect(() => {
94
  if (maxNbPages !== userDefinedMaxNumberOfPages) {
95
  setMaxNbPages(userDefinedMaxNumberOfPages)
@@ -189,6 +190,8 @@ export default function Main() {
189
  // existing panels are critical here: this is how we can
190
  // continue over an existing story
191
  existingPanels: ref.current.existingPanels,
 
 
192
  })
193
  // console.log("LLM generated some new panels:", candidatePanels)
194
 
 
19
  import { localStorageKeys } from "./interface/settings-dialog/localStorageKeys"
20
  import { defaultSettings } from "./interface/settings-dialog/defaultSettings"
21
  import { SignUpCTA } from "./interface/sign-up-cta"
22
+ import { useLLMVendorConfig } from "@/lib/useLLMVendorConfig"
23
 
24
  export default function Main() {
25
  const [_isPending, startTransition] = useTransition()
26
 
27
+ const llmVendorConfig = useLLMVendorConfig()
28
  const { config, isConfigReady } = useDynamicConfig()
29
  const isGeneratingStory = useStore(s => s.isGeneratingStory)
30
  const setGeneratingStory = useStore(s => s.setGeneratingStory)
 
90
  showNextPageButton
91
  }, null, 2))
92
  */
93
+
94
  useEffect(() => {
95
  if (maxNbPages !== userDefinedMaxNumberOfPages) {
96
  setMaxNbPages(userDefinedMaxNumberOfPages)
 
190
  // existing panels are critical here: this is how we can
191
  // continue over an existing story
192
  existingPanels: ref.current.existingPanels,
193
+
194
+ llmVendorConfig,
195
  })
196
  // console.log("LLM generated some new panels:", candidatePanels)
197
 
src/app/page.tsx CHANGED
@@ -1,16 +1,19 @@
1
  "use server"
2
 
 
3
  import Head from "next/head"
 
4
 
5
- import Main from "./main"
6
  import { TooltipProvider } from "@/components/ui/tooltip"
7
- import Script from "next/script"
8
  import { cn } from "@/lib/utils"
 
 
 
9
  // import { Maintenance } from "./interface/maintenance"
10
 
11
  // https://nextjs.org/docs/pages/building-your-application/optimizing/fonts
12
 
13
- export default async function IndexPage({ params: { ownerId } }: { params: { ownerId: string }}) {
14
  return (
15
  <>
16
  <Head>
@@ -22,22 +25,29 @@ export default async function IndexPage({ params: { ownerId } }: { params: { own
22
  `light fixed inset-0 w-screen h-screen flex flex-col items-center`,
23
  `bg-zinc-50 text-stone-900 overflow-y-scroll`,
24
 
25
- // important: in "print" mode we need to allowing going out of the screen
26
  `inset-auto print:h-auto print:w-auto print:overflow-visible print:relative print:flex-none`
27
  )}>
28
  <TooltipProvider delayDuration={100}>
29
 
30
  <Main />
31
- {/* <Maintenance /> */}
 
 
 
 
 
 
 
32
 
33
  </TooltipProvider>
 
34
  <Script src="https://www.googletagmanager.com/gtag/js?id=GTM-WH4MGSHS" />
35
  <Script id="google-analytics">
36
  {`
37
  window.dataLayer = window.dataLayer || [];
38
  function gtag(){dataLayer.push(arguments);}
39
  gtag('js', new Date());
40
-
41
  gtag('config', 'GTM-WH4MGSHS');
42
  `}
43
  </Script>
 
1
  "use server"
2
 
3
+ import { ComponentProps } from "react"
4
  import Head from "next/head"
5
+ import Script from "next/script"
6
 
 
7
  import { TooltipProvider } from "@/components/ui/tooltip"
 
8
  import { cn } from "@/lib/utils"
9
+
10
+ import Main from "./main"
11
+
12
  // import { Maintenance } from "./interface/maintenance"
13
 
14
  // https://nextjs.org/docs/pages/building-your-application/optimizing/fonts
15
 
16
+ export default async function IndexPage() {
17
  return (
18
  <>
19
  <Head>
 
25
  `light fixed inset-0 w-screen h-screen flex flex-col items-center`,
26
  `bg-zinc-50 text-stone-900 overflow-y-scroll`,
27
 
28
+ // important: in "print" mode we need to allow going out of the screen
29
  `inset-auto print:h-auto print:w-auto print:overflow-visible print:relative print:flex-none`
30
  )}>
31
  <TooltipProvider delayDuration={100}>
32
 
33
  <Main />
34
+
35
+ {/*
36
+
37
+ to display a maintenance page, hide <Main /> and uncomment this instead:
38
+
39
+ <Maintenance />
40
+
41
+ */}
42
 
43
  </TooltipProvider>
44
+
45
  <Script src="https://www.googletagmanager.com/gtag/js?id=GTM-WH4MGSHS" />
46
  <Script id="google-analytics">
47
  {`
48
  window.dataLayer = window.dataLayer || [];
49
  function gtag(){dataLayer.push(arguments);}
50
  gtag('js', new Date());
 
51
  gtag('config', 'GTM-WH4MGSHS');
52
  `}
53
  </Script>
src/app/queries/getLLMEngineFunction.ts ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { LLMEngine } from "@/types"
2
+ import { predict as predictWithHuggingFace } from "./predictWithHuggingFace"
3
+ import { predict as predictWithOpenAI } from "./predictWithOpenAI"
4
+ import { predict as predictWithGroq } from "./predictWithGroq"
5
+ import { predict as predictWithAnthropic } from "./predictWithAnthropic"
6
+
7
+ export const defaultLLMEngineName = `${process.env.LLM_ENGINE || ""}` as LLMEngine
8
+
9
+ export function getLLMEngineFunction(llmEngineName: LLMEngine = defaultLLMEngineName) {
10
+ const llmEngineFunction =
11
+ llmEngineName === "GROQ" ? predictWithGroq :
12
+ llmEngineName === "ANTHROPIC" ? predictWithAnthropic :
13
+ llmEngineName === "OPENAI" ? predictWithOpenAI :
14
+ predictWithHuggingFace
15
+
16
+ return llmEngineFunction
17
+ }
18
+
19
+ export const defaultLLMEngineFunction = getLLMEngineFunction()
src/app/queries/getStoryContinuation.ts CHANGED
@@ -1,5 +1,5 @@
1
  import { Preset } from "../engine/presets"
2
- import { GeneratedPanel } from "@/types"
3
  import { predictNextPanels } from "./predictNextPanels"
4
  import { joinWords } from "@/lib/joinWords"
5
  import { sleep } from "@/lib/sleep"
@@ -11,6 +11,7 @@ export const getStoryContinuation = async ({
11
  nbPanelsToGenerate,
12
  maxNbPanels,
13
  existingPanels = [],
 
14
  }: {
15
  preset: Preset;
16
  stylePrompt?: string;
@@ -18,6 +19,7 @@ export const getStoryContinuation = async ({
18
  nbPanelsToGenerate: number;
19
  maxNbPanels: number;
20
  existingPanels?: GeneratedPanel[];
 
21
  }): Promise<GeneratedPanel[]> => {
22
 
23
  let panels: GeneratedPanel[] = []
@@ -34,6 +36,7 @@ export const getStoryContinuation = async ({
34
  nbPanelsToGenerate,
35
  maxNbPanels,
36
  existingPanels,
 
37
  })
38
 
39
  // console.log("LLM responded with panelCandidates:", panelCandidates)
 
1
  import { Preset } from "../engine/presets"
2
+ import { GeneratedPanel, LLMVendorConfig } from "@/types"
3
  import { predictNextPanels } from "./predictNextPanels"
4
  import { joinWords } from "@/lib/joinWords"
5
  import { sleep } from "@/lib/sleep"
 
11
  nbPanelsToGenerate,
12
  maxNbPanels,
13
  existingPanels = [],
14
+ llmVendorConfig
15
  }: {
16
  preset: Preset;
17
  stylePrompt?: string;
 
19
  nbPanelsToGenerate: number;
20
  maxNbPanels: number;
21
  existingPanels?: GeneratedPanel[];
22
+ llmVendorConfig: LLMVendorConfig
23
  }): Promise<GeneratedPanel[]> => {
24
 
25
  let panels: GeneratedPanel[] = []
 
36
  nbPanelsToGenerate,
37
  maxNbPanels,
38
  existingPanels,
39
+ llmVendorConfig,
40
  })
41
 
42
  // console.log("LLM responded with panelCandidates:", panelCandidates)
src/app/queries/predict.ts CHANGED
@@ -1,15 +1,23 @@
1
  "use server"
2
 
3
- import { LLMEngine } from "@/types"
4
- import { predict as predictWithHuggingFace } from "./predictWithHuggingFace"
5
- import { predict as predictWithOpenAI } from "./predictWithOpenAI"
6
- import { predict as predictWithGroq } from "./predictWithGroq"
7
- import { predict as predictWithAnthropic } from "./predictWithAnthropic"
8
-
9
- const llmEngine = `${process.env.LLM_ENGINE || ""}` as LLMEngine
10
-
11
- export const predict =
12
- llmEngine === "GROQ" ? predictWithGroq :
13
- llmEngine === "ANTHROPIC" ? predictWithAnthropic :
14
- llmEngine === "OPENAI" ? predictWithOpenAI :
15
- predictWithHuggingFace
 
 
 
 
 
 
 
 
 
1
  "use server"
2
 
3
+ import { LLMEngine, LLMPredictionFunctionParams } from "@/types"
4
+ import { defaultLLMEngineName, getLLMEngineFunction } from "./getLLMEngineFunction"
5
+
6
+ export async function predict(params: LLMPredictionFunctionParams): Promise<string> {
7
+ const { llmVendorConfig: { vendor } } = params
8
+ // LLMVendor = what the user configure in the UI (eg. a dropdown item called default server)
9
+ // LLMEngine = the actual engine to use (eg. hugging face)
10
+ const llmEngineName: LLMEngine =
11
+ vendor === "ANTHROPIC" ? "ANTHROPIC" :
12
+ vendor === "GROQ" ? "GROQ" :
13
+ vendor === "OPENAI" ? "OPENAI" :
14
+ defaultLLMEngineName
15
+
16
+ const llmEngineFunction = getLLMEngineFunction(llmEngineName)
17
+
18
+ // console.log("predict: using " + llmEngineName)
19
+ const results = await llmEngineFunction(params)
20
+
21
+ // console.log("predict: result: " + results)
22
+ return results
23
+ }
src/app/queries/predictNextPanels.ts CHANGED
@@ -1,4 +1,4 @@
1
- import { GeneratedPanel } from "@/types"
2
  import { cleanJson } from "@/lib/cleanJson"
3
  import { dirtyGeneratedPanelCleaner } from "@/lib/dirtyGeneratedPanelCleaner"
4
  import { dirtyGeneratedPanelsParser } from "@/lib/dirtyGeneratedPanelsParser"
@@ -15,12 +15,14 @@ export const predictNextPanels = async ({
15
  nbPanelsToGenerate,
16
  maxNbPanels,
17
  existingPanels = [],
 
18
  }: {
19
- preset: Preset;
20
- prompt: string;
21
- nbPanelsToGenerate: number;
22
- maxNbPanels: number;
23
- existingPanels: GeneratedPanel[];
 
24
  }): Promise<GeneratedPanel[]> => {
25
  // console.log("predictNextPanels: ", { prompt, nbPanelsToGenerate })
26
  // throw new Error("Planned maintenance")
@@ -61,7 +63,12 @@ export const predictNextPanels = async ({
61
 
62
  try {
63
  // console.log(`calling predict:`, { systemPrompt, userPrompt, nbMaxNewTokens })
64
- result = `${await predict({ systemPrompt, userPrompt, nbMaxNewTokens })}`.trim()
 
 
 
 
 
65
  console.log("LLM result (1st trial):", result)
66
  if (!result.length) {
67
  throw new Error("empty result on 1st trial!")
@@ -72,7 +79,12 @@ export const predictNextPanels = async ({
72
  await sleep(2000)
73
 
74
  try {
75
- result = `${await predict({ systemPrompt: systemPrompt + " \n ", userPrompt, nbMaxNewTokens })}`.trim()
 
 
 
 
 
76
  console.log("LLM result (2nd trial):", result)
77
  if (!result.length) {
78
  throw new Error("empty result on 2nd trial!")
 
1
+ import { GeneratedPanel, LLMVendorConfig } from "@/types"
2
  import { cleanJson } from "@/lib/cleanJson"
3
  import { dirtyGeneratedPanelCleaner } from "@/lib/dirtyGeneratedPanelCleaner"
4
  import { dirtyGeneratedPanelsParser } from "@/lib/dirtyGeneratedPanelsParser"
 
15
  nbPanelsToGenerate,
16
  maxNbPanels,
17
  existingPanels = [],
18
+ llmVendorConfig,
19
  }: {
20
+ preset: Preset
21
+ prompt: string
22
+ nbPanelsToGenerate: number
23
+ maxNbPanels: number
24
+ existingPanels: GeneratedPanel[]
25
+ llmVendorConfig: LLMVendorConfig
26
  }): Promise<GeneratedPanel[]> => {
27
  // console.log("predictNextPanels: ", { prompt, nbPanelsToGenerate })
28
  // throw new Error("Planned maintenance")
 
63
 
64
  try {
65
  // console.log(`calling predict:`, { systemPrompt, userPrompt, nbMaxNewTokens })
66
+ result = `${await predict({
67
+ systemPrompt,
68
+ userPrompt,
69
+ nbMaxNewTokens,
70
+ llmVendorConfig
71
+ })}`.trim()
72
  console.log("LLM result (1st trial):", result)
73
  if (!result.length) {
74
  throw new Error("empty result on 1st trial!")
 
79
  await sleep(2000)
80
 
81
  try {
82
+ result = `${await predict({
83
+ systemPrompt: systemPrompt + " \n ",
84
+ userPrompt,
85
+ nbMaxNewTokens,
86
+ llmVendorConfig
87
+ })}`.trim()
88
  console.log("LLM result (2nd trial):", result)
89
  if (!result.length) {
90
  throw new Error("empty result on 2nd trial!")
src/app/queries/predictWithAnthropic.ts CHANGED
@@ -1,5 +1,6 @@
1
  "use server"
2
 
 
3
  import Anthropic from '@anthropic-ai/sdk';
4
  import { MessageParam } from '@anthropic-ai/sdk/resources';
5
 
@@ -7,13 +8,18 @@ export async function predict({
7
  systemPrompt,
8
  userPrompt,
9
  nbMaxNewTokens,
10
- }: {
11
- systemPrompt: string
12
- userPrompt: string
13
- nbMaxNewTokens: number
14
- }): Promise<string> {
15
- const anthropicApiKey = `${process.env.AUTH_ANTHROPIC_API_KEY || ""}`
16
- const anthropicApiModel = `${process.env.LLM_ANTHROPIC_API_MODEL || "claude-3-opus-20240229"}`
 
 
 
 
 
17
 
18
  const anthropic = new Anthropic({
19
  apiKey: anthropicApiKey,
 
1
  "use server"
2
 
3
+ import { LLMPredictionFunctionParams } from '@/types';
4
  import Anthropic from '@anthropic-ai/sdk';
5
  import { MessageParam } from '@anthropic-ai/sdk/resources';
6
 
 
8
  systemPrompt,
9
  userPrompt,
10
  nbMaxNewTokens,
11
+ llmVendorConfig
12
+ }: LLMPredictionFunctionParams): Promise<string> {
13
+ const anthropicApiKey = `${
14
+ llmVendorConfig.apiKey ||
15
+ process.env.AUTH_ANTHROPIC_API_KEY ||
16
+ ""
17
+ }`
18
+ const anthropicApiModel = `${
19
+ llmVendorConfig.modelId ||
20
+ process.env.LLM_ANTHROPIC_API_MODEL ||
21
+ "claude-3-opus-20240229"
22
+ }`
23
 
24
  const anthropic = new Anthropic({
25
  apiKey: anthropicApiKey,
src/app/queries/predictWithGroq.ts CHANGED
@@ -1,19 +1,25 @@
1
  "use server"
2
 
 
3
  import Groq from "groq-sdk"
4
 
5
  export async function predict({
6
  systemPrompt,
7
  userPrompt,
8
  nbMaxNewTokens,
9
- }: {
10
- systemPrompt: string
11
- userPrompt: string
12
- nbMaxNewTokens: number
13
- }): Promise<string> {
14
- const groqApiKey = `${process.env.AUTH_GROQ_API_KEY || ""}`
15
- const groqApiModel = `${process.env.LLM_GROQ_API_MODEL || "mixtral-8x7b-32768"}`
16
-
 
 
 
 
 
17
  const groq = new Groq({
18
  apiKey: groqApiKey,
19
  })
 
1
  "use server"
2
 
3
+ import { LLMPredictionFunctionParams } from "@/types"
4
  import Groq from "groq-sdk"
5
 
6
  export async function predict({
7
  systemPrompt,
8
  userPrompt,
9
  nbMaxNewTokens,
10
+ llmVendorConfig
11
+ }: LLMPredictionFunctionParams): Promise<string> {
12
+ const groqApiKey = `${
13
+ llmVendorConfig.apiKey ||
14
+ process.env.AUTH_GROQ_API_KEY ||
15
+ ""
16
+ }`
17
+ const groqApiModel = `${
18
+ llmVendorConfig.modelId ||
19
+ process.env.LLM_GROQ_API_MODEL ||
20
+ "mixtral-8x7b-32768"
21
+ }`
22
+
23
  const groq = new Groq({
24
  apiKey: groqApiKey,
25
  })
src/app/queries/predictWithHuggingFace.ts CHANGED
@@ -1,18 +1,16 @@
1
  "use server"
2
 
3
  import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
4
- import { LLMEngine } from "@/types"
5
  import { createZephyrPrompt } from "@/lib/createZephyrPrompt"
6
 
7
  export async function predict({
8
  systemPrompt,
9
  userPrompt,
10
  nbMaxNewTokens,
11
- }: {
12
- systemPrompt: string
13
- userPrompt: string
14
- nbMaxNewTokens: number
15
- }): Promise<string> {
16
  const hf = new HfInference(process.env.AUTH_HF_API_TOKEN)
17
 
18
  const llmEngine = `${process.env.LLM_ENGINE || ""}` as LLMEngine
 
1
  "use server"
2
 
3
  import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
4
+ import { LLMEngine, LLMPredictionFunctionParams } from "@/types"
5
  import { createZephyrPrompt } from "@/lib/createZephyrPrompt"
6
 
7
  export async function predict({
8
  systemPrompt,
9
  userPrompt,
10
  nbMaxNewTokens,
11
+ // llmVendorConfig // <-- arbitrary/custom LLM models hosted on HF is not supported yet using the UI
12
+ }: LLMPredictionFunctionParams): Promise<string> {
13
+
 
 
14
  const hf = new HfInference(process.env.AUTH_HF_API_TOKEN)
15
 
16
  const llmEngine = `${process.env.LLM_ENGINE || ""}` as LLMEngine
src/app/queries/predictWithOpenAI.ts CHANGED
@@ -2,20 +2,27 @@
2
 
3
  import type { ChatCompletionMessageParam } from "openai/resources/chat"
4
  import OpenAI from "openai"
 
5
 
6
  export async function predict({
7
  systemPrompt,
8
  userPrompt,
9
  nbMaxNewTokens,
10
- }: {
11
- systemPrompt: string
12
- userPrompt: string
13
- nbMaxNewTokens: number
14
- }): Promise<string> {
15
- const openaiApiKey = `${process.env.AUTH_OPENAI_API_KEY || ""}`
 
 
 
 
 
 
 
16
  const openaiApiBaseUrl = `${process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`
17
- const openaiApiModel = `${process.env.LLM_OPENAI_API_MODEL || "gpt-3.5-turbo"}`
18
-
19
  const openai = new OpenAI({
20
  apiKey: openaiApiKey,
21
  baseURL: openaiApiBaseUrl,
 
2
 
3
  import type { ChatCompletionMessageParam } from "openai/resources/chat"
4
  import OpenAI from "openai"
5
+ import { LLMPredictionFunctionParams } from "@/types"
6
 
7
  export async function predict({
8
  systemPrompt,
9
  userPrompt,
10
  nbMaxNewTokens,
11
+ llmVendorConfig
12
+ }: LLMPredictionFunctionParams): Promise<string> {
13
+ const openaiApiKey = `${
14
+ llmVendorConfig.apiKey ||
15
+ process.env.AUTH_OPENAI_API_KEY ||
16
+ ""
17
+ }`
18
+ const openaiApiModel = `${
19
+ llmVendorConfig.modelId ||
20
+ process.env.LLM_OPENAI_API_MODEL ||
21
+ "gpt-4-turbo-preview"
22
+ }`
23
+
24
  const openaiApiBaseUrl = `${process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`
25
+
 
26
  const openai = new OpenAI({
27
  apiKey: openaiApiKey,
28
  baseURL: openaiApiBaseUrl,
src/app/store/index.ts CHANGED
@@ -7,6 +7,7 @@ import { FontName } from "@/lib/fonts"
7
  import { Preset, PresetName, defaultPreset, getPreset, getRandomPreset } from "@/app/engine/presets"
8
  import { RenderedScene } from "@/types"
9
  import { LayoutName, defaultLayout, getRandomLayoutName } from "../layouts"
 
10
 
11
  export const useStore = create<{
12
  prompt: string
@@ -70,14 +71,17 @@ export const useStore = create<{
70
 
71
  generate: (prompt: string, presetName: PresetName, layoutName: LayoutName) => void
72
  }>((set, get) => ({
73
- prompt: "",
 
 
 
74
  font: "actionman",
75
- preset: getPreset(defaultPreset),
76
 
77
  currentNbPanelsPerPage: 4,
78
  maxNbPanelsPerPage: 4,
79
  currentNbPages: 1,
80
- maxNbPages: 1,
81
  previousNbPanels: 0,
82
  currentNbPanels: 4,
83
  maxNbPanels: 4,
@@ -86,14 +90,14 @@ export const useStore = create<{
86
  captions: [],
87
  upscaleQueue: {} as Record<string, RenderedScene>,
88
  renderedScenes: {} as Record<string, RenderedScene>,
89
- showCaptions: false,
90
 
91
  // deprecated?
92
  layout: defaultLayout,
93
 
94
  layouts: [defaultLayout, defaultLayout, defaultLayout, defaultLayout],
95
 
96
- zoomLevel: 60,
97
 
98
  // deprecated?
99
  page: undefined as unknown as HTMLDivElement,
 
7
  import { Preset, PresetName, defaultPreset, getPreset, getRandomPreset } from "@/app/engine/presets"
8
  import { RenderedScene } from "@/types"
9
  import { LayoutName, defaultLayout, getRandomLayoutName } from "../layouts"
10
+ import { getParam } from "@/lib/getParam"
11
 
12
  export const useStore = create<{
13
  prompt: string
 
71
 
72
  generate: (prompt: string, presetName: PresetName, layoutName: LayoutName) => void
73
  }>((set, get) => ({
74
+ prompt:
75
+ (getParam("stylePrompt", "") || getParam("storyPrompt", ""))
76
+ ? `${getParam("stylePrompt", "")}||${getParam("storyPrompt", "")}`
77
+ : "",
78
  font: "actionman",
79
+ preset: getPreset(getParam("preset", defaultPreset)),
80
 
81
  currentNbPanelsPerPage: 4,
82
  maxNbPanelsPerPage: 4,
83
  currentNbPages: 1,
84
+ maxNbPages: getParam("maxNbPages", 1),
85
  previousNbPanels: 0,
86
  currentNbPanels: 4,
87
  maxNbPanels: 4,
 
90
  captions: [],
91
  upscaleQueue: {} as Record<string, RenderedScene>,
92
  renderedScenes: {} as Record<string, RenderedScene>,
93
+ showCaptions: getParam("showCaptions", false),
94
 
95
  // deprecated?
96
  layout: defaultLayout,
97
 
98
  layouts: [defaultLayout, defaultLayout, defaultLayout, defaultLayout],
99
 
100
+ zoomLevel: getParam("zoomLevel", 60),
101
 
102
  // deprecated?
103
  page: undefined as unknown as HTMLDivElement,
src/lib/getParam.ts ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import queryString from 'query-string'
2
+
3
+ export function getParam<T>(name: string, defaultValue: T): T {
4
+ try {
5
+ const params = queryString.parseUrl(
6
+ typeof window !== "undefined" ? (window.location.href || "") : ""
7
+ )
8
+ const stringValue = params.query[name]?.toString() || `${defaultValue || ""}`
9
+ if (typeof defaultValue === "number") {
10
+ return Number(stringValue) as T
11
+ } else if (typeof defaultValue === "boolean") {
12
+ return Boolean(stringValue) as T
13
+ } else {
14
+ return stringValue as T
15
+ }
16
+ } catch (err) {
17
+ return defaultValue
18
+ }
19
+ }
src/lib/useLLMVendorConfig.ts ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { useLocalStorage } from "usehooks-ts"
2
+
3
+ import { defaultSettings } from "@/app/interface/settings-dialog/defaultSettings"
4
+ import { localStorageKeys } from "@/app/interface/settings-dialog/localStorageKeys"
5
+ import { LLMEngine, LLMVendor, LLMVendorConfig } from "@/types"
6
+
7
+ export function useLLMVendorConfig(): LLMVendorConfig {
8
+
9
+ const [vendor, ] = useLocalStorage<LLMVendor>(
10
+ localStorageKeys.llmVendor,
11
+ defaultSettings.llmVendor
12
+ )
13
+ const [openaiApiKey, ] = useLocalStorage<string>(
14
+ localStorageKeys.openaiApiKey,
15
+ defaultSettings.openaiApiKey
16
+ )
17
+ const [openaiApiLanguageModel, ] = useLocalStorage<string>(
18
+ localStorageKeys.openaiApiLanguageModel,
19
+ defaultSettings.openaiApiLanguageModel
20
+ )
21
+ const [groqApiKey, ] = useLocalStorage<string>(
22
+ localStorageKeys.groqApiKey,
23
+ defaultSettings.groqApiKey
24
+ )
25
+ const [groqApiLanguageModel, ] = useLocalStorage<string>(
26
+ localStorageKeys.groqApiLanguageModel,
27
+ defaultSettings.groqApiLanguageModel
28
+ )
29
+ const [anthropicApiKey, ] = useLocalStorage<string>(
30
+ localStorageKeys.anthropicApiKey,
31
+ defaultSettings.anthropicApiKey
32
+ )
33
+ const [anthropicApiLanguageModel, ] = useLocalStorage<string>(
34
+ localStorageKeys.anthropicApiLanguageModel,
35
+ defaultSettings.anthropicApiLanguageModel
36
+ )
37
+
38
+ const apiKey =
39
+ vendor === "ANTHROPIC" ? anthropicApiKey :
40
+ vendor === "GROQ" ? groqApiKey :
41
+ vendor === "OPENAI" ? openaiApiKey :
42
+ ""
43
+
44
+ const modelId =
45
+ vendor === "ANTHROPIC" ? anthropicApiLanguageModel :
46
+ vendor === "GROQ" ? groqApiLanguageModel :
47
+ vendor === "OPENAI" ? openaiApiLanguageModel :
48
+ ""
49
+
50
+ return {
51
+ vendor,
52
+ apiKey,
53
+ modelId,
54
+ }
55
+ }
src/lib/useOAuth.ts CHANGED
@@ -7,6 +7,8 @@ import { OAuthResult, oauthHandleRedirectIfPresent, oauthLoginUrl } from "@huggi
7
  import { usePersistedOAuth } from "./usePersistedOAuth"
8
  import { getValidOAuth } from "./getValidOAuth"
9
  import { useDynamicConfig } from "./useDynamicConfig"
 
 
10
 
11
  export function useOAuth({
12
  debug = false
@@ -33,7 +35,6 @@ export function useOAuth({
33
  const redirectUrl = config.oauthRedirectUrl
34
  const scopes = config.oauthScopes
35
  const enableOAuth = config.enableHuggingFaceOAuth
36
- const enableOAuthWall = config.enableHuggingFaceOAuthWall
37
 
38
  const searchParams = useSearchParams()
39
  const code = searchParams?.get("code") || ""
@@ -41,9 +42,13 @@ export function useOAuth({
41
 
42
  const hasReceivedFreshOAuth = Boolean(code && state)
43
 
 
 
44
  const canLogin: boolean = Boolean(isConfigReady && clientId && enableOAuth)
45
  const isLoggedIn = Boolean(oauthResult)
46
 
 
 
47
  if (debug) {
48
  console.log("useOAuth debug:", {
49
  oauthResult,
 
7
  import { usePersistedOAuth } from "./usePersistedOAuth"
8
  import { getValidOAuth } from "./getValidOAuth"
9
  import { useDynamicConfig } from "./useDynamicConfig"
10
+ import { useLocalStorage } from "usehooks-ts"
11
+ import { useShouldDisplayLoginWall } from "./useShouldDisplayLoginWall"
12
 
13
  export function useOAuth({
14
  debug = false
 
35
  const redirectUrl = config.oauthRedirectUrl
36
  const scopes = config.oauthScopes
37
  const enableOAuth = config.enableHuggingFaceOAuth
 
38
 
39
  const searchParams = useSearchParams()
40
  const code = searchParams?.get("code") || ""
 
42
 
43
  const hasReceivedFreshOAuth = Boolean(code && state)
44
 
45
+ // note: being able to log into hugging face using the popup
46
+ // is different from seeing the "login wall"
47
  const canLogin: boolean = Boolean(isConfigReady && clientId && enableOAuth)
48
  const isLoggedIn = Boolean(oauthResult)
49
 
50
+ const enableOAuthWall = useShouldDisplayLoginWall()
51
+
52
  if (debug) {
53
  console.log("useOAuth debug:", {
54
  oauthResult,
src/lib/useShouldDisplayLoginWall.ts ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { useLocalStorage } from "usehooks-ts"
2
+
3
+ import { LLMVendor, RenderingModelVendor } from "@/types"
4
+ import { localStorageKeys } from "@/app/interface/settings-dialog/localStorageKeys"
5
+ import { defaultSettings } from "@/app/interface/settings-dialog/defaultSettings"
6
+
7
+ import { useDynamicConfig } from "./useDynamicConfig"
8
+
9
+ // we don't want to display the login wall to people forking the project,
10
+ // or to people who selected no hugging face server at all
11
+ export function useShouldDisplayLoginWall() {
12
+ const { config, isConfigReady } = useDynamicConfig()
13
+
14
+ const clientId = config.oauthClientId
15
+ const enableOAuth = config.enableHuggingFaceOAuth
16
+ const enableOAuthWall = config.enableHuggingFaceOAuthWall
17
+
18
+ const isConfigEnablingOAuthWall = Boolean(
19
+ clientId &&
20
+ enableOAuth &&
21
+ enableOAuthWall
22
+ )
23
+
24
+ const [renderingModelVendor,] = useLocalStorage<RenderingModelVendor>(
25
+ localStorageKeys.renderingModelVendor,
26
+ defaultSettings.renderingModelVendor
27
+ )
28
+ const [llmVendor,] = useLocalStorage<LLMVendor>(
29
+ localStorageKeys.llmVendor,
30
+ defaultSettings.llmVendor
31
+ )
32
+
33
+ const isUsingOneOfTheDefaultServices =
34
+ renderingModelVendor === "SERVER" ||
35
+ llmVendor === "SERVER"
36
+
37
+
38
+ const shouldDisplayLoginWall =
39
+ isConfigReady &&
40
+ isConfigEnablingOAuthWall &&
41
+ isUsingOneOfTheDefaultServices
42
+
43
+ return shouldDisplayLoginWall
44
+ }
src/types.ts CHANGED
@@ -95,6 +95,8 @@ export type GeneratedPanel = {
95
 
96
  export type GeneratedPanels = GeneratedPanel[]
97
 
 
 
98
  export type LLMEngine =
99
  | "INFERENCE_API"
100
  | "INFERENCE_ENDPOINT"
@@ -103,19 +105,40 @@ export type LLMEngine =
103
  | "GROQ"
104
  | "ANTHROPIC"
105
 
106
- export type RenderingEngine =
107
  | "VIDEOCHAIN"
108
  | "OPENAI"
109
  | "REPLICATE"
110
  | "INFERENCE_API"
111
  | "INFERENCE_ENDPOINT"
112
 
113
- export type RenderingModelVendor =
114
  | "SERVER"
115
  | "OPENAI"
116
  | "REPLICATE"
117
  | "HUGGINGFACE"
118
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
119
  export type PostVisibility =
120
  | "featured" // featured by admins
121
  | "trending" // top trending / received more than 10 upvotes
@@ -160,6 +183,7 @@ export type LayoutProps = {
160
  export type Settings = {
161
  renderingModelVendor: RenderingModelVendor
162
  renderingUseTurbo: boolean
 
163
  huggingFaceOAuth: string
164
  huggingfaceApiKey: string
165
  huggingfaceInferenceApiModel: string
@@ -174,6 +198,8 @@ export type Settings = {
174
  openaiApiLanguageModel: string
175
  groqApiKey: string
176
  groqApiLanguageModel: string
 
 
177
  hasGeneratedAtLeastOnce: boolean
178
  userDefinedMaxNumberOfPages: number
179
  }
 
95
 
96
  export type GeneratedPanels = GeneratedPanel[]
97
 
98
+ // LLMVendor = what the user configures in the UI (e.g. a dropdown item called "default server")
99
+ // LLMEngine = the actual engine to use (eg. hugging face)
100
  export type LLMEngine =
101
  | "INFERENCE_API"
102
  | "INFERENCE_ENDPOINT"
 
105
  | "GROQ"
106
  | "ANTHROPIC"
107
 
108
// RenderingEngine = the backend actually used to render panel images.
export type RenderingEngine =
  | "VIDEOCHAIN"
  | "OPENAI"
  | "REPLICATE"
  | "INFERENCE_API"
  | "INFERENCE_ENDPOINT"
114
 
115
// RenderingModelVendor = the rendering provider the user selects in the UI
// ("SERVER" meaning the default server-side service; the others require a
// user-provided API key — see the Settings type).
export type RenderingModelVendor =
  | "SERVER"
  | "OPENAI"
  | "REPLICATE"
  | "HUGGINGFACE"
120
 
121
// LLMVendor = what the user configures in the UI (e.g. a dropdown item called "default server")
// LLMEngine = the actual engine used behind the scenes (e.g. Hugging Face)
export type LLMVendor =
  | "SERVER"
  | "OPENAI"
  | "GROQ"
  | "ANTHROPIC"
+
129
+ export type LLMVendorConfig = {
130
+ vendor: LLMVendor
131
+ apiKey: string
132
+ modelId: string
133
+ }
134
+
135
+ export type LLMPredictionFunctionParams = {
136
+ systemPrompt: string
137
+ userPrompt: string
138
+ nbMaxNewTokens: number
139
+ llmVendorConfig: LLMVendorConfig
140
+ }
141
+
142
  export type PostVisibility =
143
  | "featured" // featured by admins
144
  | "trending" // top trending / received more than 10 upvotes
 
183
  export type Settings = {
184
  renderingModelVendor: RenderingModelVendor
185
  renderingUseTurbo: boolean
186
+ llmVendor: LLMVendor
187
  huggingFaceOAuth: string
188
  huggingfaceApiKey: string
189
  huggingfaceInferenceApiModel: string
 
198
  openaiApiLanguageModel: string
199
  groqApiKey: string
200
  groqApiLanguageModel: string
201
+ anthropicApiKey: string
202
+ anthropicApiLanguageModel: string
203
  hasGeneratedAtLeastOnce: boolean
204
  userDefinedMaxNumberOfPages: number
205
  }