Commit 6821e3e by jbilcke-hf (HF staff)
Parent(s): 6da9beb

making progress on the AI Stories Factory

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. .env +7 -0
  2. package-lock.json +1 -0
  3. package.json +1 -0
  4. src/app/layout.tsx +1 -1
  5. src/app/main.tsx +18 -1
  6. src/app/page.tsx +1 -1
  7. src/app/server/aitube/generateClap.ts +44 -0
  8. src/app/server/config.ts +3 -0
  9. src/app/server/services/background.ts +0 -50
  10. src/app/server/services/inpaint.ts +0 -107
  11. src/app/server/services/segment.ts +0 -138
  12. src/app/server/services/stableCascade.ts +0 -67
  13. src/app/server/services/upscale.ts +0 -105
  14. src/app/server/utils/alphaToWhite.ts +0 -34
  15. src/app/server/utils/segmentToInpaintingMask.ts +0 -35
  16. src/app/server/utils/segmentsToInpaintingMasks.ts +0 -78
  17. src/components/form/input-field.tsx +1 -1
  18. src/components/form/select-field.tsx +1 -1
  19. src/components/form/slider-field.tsx +1 -1
  20. src/components/form/textarea-field.tsx +1 -1
  21. src/components/ui/accordion.tsx +1 -1
  22. src/components/ui/alert.tsx +1 -1
  23. src/components/ui/avatar.tsx +1 -1
  24. src/components/ui/badge.tsx +1 -1
  25. src/components/ui/button.tsx +1 -1
  26. src/components/ui/card.tsx +1 -1
  27. src/components/ui/checkbox.tsx +1 -1
  28. src/components/ui/command.tsx +1 -1
  29. src/components/ui/dialog.tsx +1 -1
  30. src/components/ui/dropdown-menu.tsx +1 -1
  31. src/components/ui/input.tsx +1 -1
  32. src/components/ui/label.tsx +1 -1
  33. src/components/ui/menubar.tsx +1 -1
  34. src/components/ui/popover.tsx +1 -1
  35. src/components/ui/select.tsx +1 -1
  36. src/components/ui/separator.tsx +1 -1
  37. src/components/ui/slider.tsx +1 -1
  38. src/components/ui/switch.tsx +1 -1
  39. src/components/ui/table.tsx +1 -1
  40. src/components/ui/tabs.tsx +1 -1
  41. src/components/ui/textarea.tsx +1 -1
  42. src/components/ui/toast.tsx +1 -1
  43. src/components/ui/tooltip.tsx +1 -1
  44. src/components/ui/vertical-slider.tsx +1 -1
  45. src/{app/server/utils → lib/base64}/addBase64HeaderToJpeg.ts +0 -0
  46. src/{app/server/utils → lib/base64}/addBase64HeaderToPng.ts +0 -0
  47. src/lib/base64/blobToDataUri.ts +21 -0
  48. src/lib/base64/dataUriToBlob.ts +15 -0
  49. src/lib/clap/clap-specification-draft.md +162 -0
  50. src/lib/clap/clapToDataUri.ts +11 -0
.env CHANGED
@@ -1,2 +1,9 @@
 HF_API_TOKEN="<USE YOUR OWN>"
 MICROSERVICE_API_SECRET_TOKEN="<USE YOUR OWN>"
+
+AI_TUBE_URL="https://aitube.at"
+# AI_TUBE_URL="http://localhost:3000"
+
+AI_TUBE_API_SECRET_JWT_KEY=""
+AI_TUBE_API_SECRET_JWT_ISSUER=""
+AI_TUBE_API_SECRET_JWT_AUDIENCE=""
package-lock.json CHANGED
@@ -54,6 +54,7 @@
         "typescript": "5.4.5",
         "usehooks-ts": "^2.14.0",
         "uuid": "^9.0.1",
+        "yaml": "^2.4.1",
         "zustand": "^4.5.1"
       }
     },
package.json CHANGED
@@ -55,6 +55,7 @@
     "typescript": "5.4.5",
     "usehooks-ts": "^2.14.0",
     "uuid": "^9.0.1",
+    "yaml": "^2.4.1",
     "zustand": "^4.5.1"
   }
 }
src/app/layout.tsx CHANGED
@@ -1,4 +1,4 @@
-import { cn } from '@/lib/utils'
+import { cn } from '@/lib/utils/cn'
 import './globals.css'
 import type { Metadata } from 'next'
 import { inter, salsa } from './fonts'
src/app/main.tsx CHANGED
@@ -6,12 +6,13 @@ import { Card, CardContent, CardHeader } from '@/components/ui/card'
 import { Button } from '@/components/ui/button'
 import { InputField } from '@/components/form/input-field'
 import { Toaster } from '@/components/ui/sonner'
-import { cn } from '@/lib/utils'
+import { cn } from '@/lib/utils/cn'
 
 import { useStore } from './store'
 import { TextareaField } from '@/components/form/textarea-field'
 import { DeviceFrameset } from 'react-device-frameset'
 import 'react-device-frameset/styles/marvel-devices.min.css'
+import { generateClap } from './server/aitube/generateClap'
 
 export function Main() {
   const [_isPending, startTransition] = useTransition()
@@ -39,7 +40,23 @@ export function Main() {
   const isBusy = status === "generating" || hasPendingTasks
 
   const handleSubmit = async () => {
+    const prompt = storyPromptDraft
 
+    setStatus("generating")
+    setStoryPrompt(prompt)
+
+    startTransition(async () => {
+      console.log(`handleSubmit(): generating a clap using prompt = "${prompt}" `)
+
+      try {
+        const clap = await generateClap({ prompt })
+
+        console.log(`handleSubmit(): received a clap = `, clap)
+        setStatus("finished")
+      } catch (err) {
+        setStatus("error")
+      }
+    })
   }
 
   return (
src/app/page.tsx CHANGED
@@ -4,7 +4,7 @@ import { useEffect, useState } from "react"
 import Head from "next/head"
 import Script from "next/script"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 import { Main } from "./main"
 
src/app/server/aitube/generateClap.ts ADDED
@@ -0,0 +1,44 @@
+"use server"
+
+import { parseClap } from "@/lib/clap/parseClap"
+import { ClapProject } from "@/lib/clap/types"
+
+import { aitubeApiUrl } from "../config"
+
+export async function generateClap({
+  prompt = "",
+}: {
+  prompt: string
+}): Promise<ClapProject> {
+  if (!prompt) { throw new Error(`please provide a prompt`) }
+
+  // AiTube Stories is nice, but we also need to leave some compute for AiTube Live and AiTube Gaming
+  const height = 1024
+  const width = 512
+
+  // console.log(`calling `+ gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict")
+
+  // remember: a space needs to be public for the classic fetch() to work
+  const res = await fetch(aitubeApiUrl, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      // TODO pass the JWT so that only the AI Stories Factory can call the API
+      // Authorization: `Bearer ${hfApiToken}`,
+    },
+    body: JSON.stringify({
+      prompt,
+      width,
+      height
+    }),
+    cache: "no-store",
+    // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
+    // next: { revalidate: 1 }
+  })
+
+  const blob = await res.blob()
+
+  const clap = await parseClap(blob)
+
+  return clap
+}
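The call site for this new server action is in src/app/main.tsx above, but it can also be invoked directly from other server-side code. A minimal sketch (illustrative only; the demo prompt and wrapper function are not part of the commit):

```typescript
import { generateClap } from "@/app/server/aitube/generateClap"

// Illustrative smoke test, not part of the commit:
// generateClap() throws on an empty prompt, otherwise it POSTs to the
// AiTube API and returns the parsed ClapProject.
async function demo() {
  const clap = await generateClap({ prompt: "a tiny robot discovers a garden" })
  console.log("received a clap:", clap)
}

demo().catch(console.error)
```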
src/app/server/config.ts CHANGED
@@ -1,2 +1,5 @@
 
 export const serverHuggingfaceApiKey = `${process.env.HF_API_TOKEN || ""}`
+
+export const aitubeUrl = `${process.env.AI_TUBE_URL || "" }`
+export const aitubeApiUrl = aitubeUrl + (aitubeUrl.endsWith("/") ? "" : "/") + "api/"
src/app/server/services/background.ts DELETED
@@ -1,50 +0,0 @@
-"use server"
-
-import { BackgroundRemovalParams } from "@/types"
-
-import { addBase64HeaderToPng } from "../utils/addBase64HeaderToPng"
-
-const gradioApi = `https://jbilcke-hf-background-removal-api.hf.space`
-const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`
-
-export async function removeBackground({
-  imageAsBase64,
-}: BackgroundRemovalParams): Promise<string> {
-
-  // remember: a space needs to be public for the classic fetch() to work
-  const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-      // Authorization: `Bearer ${hfApiToken}`,
-    },
-    body: JSON.stringify({
-      fn_index: 0, // <- is it 0 or 1?
-      data: [
-        microserviceApiKey,
-        imageAsBase64,
-      ],
-    }),
-    cache: "no-store",
-    // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
-    // next: { revalidate: 1 }
-  })
-
-  const { data } = await res.json()
-
-  // console.log("data:", data)
-  // Recommendation: handle errors
-  if (res.status !== 200 || !Array.isArray(data)) {
-    // This will activate the closest `error.js` Error Boundary
-    throw new Error(`Failed to fetch data (status: ${res.status})`)
-  }
-  // console.log("data:", data.slice(0, 50))
-
-  const base64Content = (data?.[0] || "") as string
-
-  if (!base64Content) {
-    throw new Error(`invalid response (no content)`)
-  }
-
-  return addBase64HeaderToPng(base64Content)
-}
src/app/server/services/inpaint.ts DELETED
@@ -1,107 +0,0 @@
-"use server"
-
-import { InpaintingParams } from "@/types"
-
-import { addBase64HeaderToPng } from "../utils/addBase64HeaderToPng"
-import { segmentToInpaintingMask } from "../utils/segmentToInpaintingMask"
-
-const gradioApi = `https://jbilcke-hf-inpainting-api.hf.space`
-const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`
-
-export async function inpaint({
-  imageAsBase64,
-  maskAsBase64,
-  positivePrompt = "",
-  negativePrompt = "",
-  guidanceScale = 7.5,
-  numInferenceSteps = 20,
-  strength = 0.99,
-  scheduler = "EulerDiscreteScheduler"
-}: InpaintingParams): Promise<string> {
-
-  const posPrompt = [
-    positivePrompt,
-    "clean",
-    "high-resolution",
-    "8k",
-    "best quality",
-    "masterpiece",
-    "crisp",
-    "sharp",
-    "intricate details"
-  ].join(", ")
-
-  const negPrompt = [
-    negativePrompt,
-    "pixelated",
-    "pixels",
-    "noise",
-    "blur",
-    "motion blur",
-    "lowres",
-    "oversmooth",
-    "longbody",
-    "bad anatomy",
-    "bad hands",
-    "missing fingers",
-    "extra digit",
-    "fewer digits",
-    "cropped",
-    "worst quality",
-    "low quality",
-    "artificial",
-    "unrealistic",
-    "watermark",
-    "trademark",
-    "error",
-    "mistake"
-  ].join(", ")
-
-  // the segmentation mask is a RGB color one (that's how we can split layers)
-  // so we first convert it to either black or white
-  const inpaintingMaskAsBase64 = await segmentToInpaintingMask(maskAsBase64)
-
-  // remember: a space needs to be public for the classic fetch() to work
-  const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-      // Authorization: `Bearer ${hfApiToken}`,
-    },
-    body: JSON.stringify({
-      fn_index: 0, // <- is it 0 or 1?
-      data: [
-        microserviceApiKey,
-        imageAsBase64, // blob in 'parameter_5' Image component
-        inpaintingMaskAsBase64,
-        posPrompt,
-        negPrompt,
-        guidanceScale,
-        numInferenceSteps,
-        strength,
-        scheduler,
-      ],
-    }),
-    cache: "no-store",
-    // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
-    // next: { revalidate: 1 }
-  })
-
-  const { data } = await res.json()
-
-  // console.log("data:", data)
-  // Recommendation: handle errors
-  if (res.status !== 200 || !Array.isArray(data)) {
-    // This will activate the closest `error.js` Error Boundary
-    throw new Error(`Failed to fetch data (status: ${res.status})`)
-  }
-  // console.log("data:", data.slice(0, 50))
-
-  const base64Content = (data?.[0] || "") as string
-
-  if (!base64Content) {
-    throw new Error(`invalid response (no content)`)
-  }
-
-  return addBase64HeaderToPng(base64Content)
-}
src/app/server/services/segment.ts DELETED
@@ -1,138 +0,0 @@
-"use server"
-
-import { SemanticLayer, SemanticLayers } from "@/lib/config"
-
-import { addBase64HeaderToPng } from "../utils/addBase64HeaderToPng"
-import { segmentsToInpaintingMasks } from "../utils/segmentsToInpaintingMasks"
-import { alphaToWhite } from "../utils/alphaToWhite"
-
-const gradioApi = `https://jbilcke-hf-segmentation-api.hf.space`
-const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`
-
-export type SegmentationResult = {
-  id: number
-  box: number[] // [299.63092041015625, 111.72967529296875, 661.6744384765625, 692.8449096679688],
-  label: string
-  score: number
-  color: number[] // [0.8506358185563304, 0.9904733533937202, 0.32005103765589715, 1.0]
-}
-
-type SegmentationApiResponse = {
-  data: SegmentationResult[]
-  bitmap: string // base64 png
-}
-
-export type SegmentationResults = {
-  data: Partial<Record<SemanticLayer, SegmentationResult>>
-  bitmap: string // base64 png
-}
-
-export async function segment({
-  imageAsBase64,
-  layers,
-}: {
-  imageAsBase64: string
-  layers: SemanticLayers
-}): Promise<SemanticLayers> {
-
-  const emptyResponse: SemanticLayers = {}
-
-  Object.entries(layers).forEach(([key, value]) => {
-    emptyResponse[key as SemanticLayer] = ""
-  })
-
-  // console.log(`calling `+ gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict")
-
-  const detectionPrompt = Object.keys(layers).map(x => x.trim().toLowerCase()).join(" . ")
-
-  // min 0, max 1, value 0.3, step 0.001
-  const boxThreshold = 0.3
-
-  // min 0, max 1, value 0.25, step 0.001
-  const textThreshold = 0.25
-
-  // min 0, max 1, value 0.8, step 0.001
-  const iouThreshold = 0.8
-
-  // SAM is finicky, it doesn't work on images with an alpha channel
-  // so we first need to remove that
-  let imageToSegmentInBase64 = ""
-  imageToSegmentInBase64 = imageAsBase64
-  /*
-  try {
-    imageToSegmentInBase64 = await alphaToWhite(imageAsBase64)
-  } catch (err) {
-    console.error(`failed to get a valid imageToSegmentInBase64:`, err)
-    return emptyResponse
-  }
-  */
-
-  // remember: a space needs to be public for the classic fetch() to work
-  const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-      // Authorization: `Bearer ${hfApiToken}`,
-    },
-    body: JSON.stringify({
-      fn_index: 0, // <- is it 0 or 1?
-      data: [
-        microserviceApiKey,
-        imageToSegmentInBase64,
-        detectionPrompt,
-        boxThreshold,
-        textThreshold,
-        iouThreshold,
-      ],
-    }),
-    cache: "no-store",
-    // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
-    // next: { revalidate: 1 }
-  })
-
-  const layeredResults = {} as Partial<Record<SemanticLayer, SegmentationResult>>
-
-  const { data } = await res.json()
-
-  // console.log("data:", data)
-  // Recommendation: handle errors
-  if (res.status !== 200 || !Array.isArray(data)) {
-    // This will activate the closest `error.js` Error Boundary
-    console.error(`Failed to fetch data (${res.status} error: ${res.statusText})`, res)
-    return emptyResponse
-  }
-  // console.log("data:", data.slice(0, 50))
-
-  let apiResponse: SegmentationApiResponse = {
-    data: [],
-    bitmap: ""
-  }
-
-  try {
-    apiResponse = JSON.parse((data?.[0] || "{}")) as SegmentationApiResponse
-  } catch (err) {
-    console.error(`Failed to parse api response`, err)
-    return emptyResponse
-  }
-
-  // console.log("segmentation", segmentation)
-  // console.log("segmentation.data:", segmentation.data)
-  const items = [...(apiResponse.data || [])]
-  // console.log("items:", items)
-
-  const bitmap = apiResponse.bitmap ? addBase64HeaderToPng(apiResponse.bitmap) : ""
-
-  Object.entries(layers).forEach(([key, value]) => {
-    const match = items.find(x => `${key || ""}`.trim().toLowerCase() === `${x.label || ""}`.trim().toLowerCase())
-    if (match) {
-      layeredResults[key as SemanticLayer] = match
-    }
-  })
-
-  const maskLayers = await segmentsToInpaintingMasks({
-    data: layeredResults,
-    bitmap,
-  })
-
-  return maskLayers
-}
src/app/server/services/stableCascade.ts DELETED
@@ -1,67 +0,0 @@
-"use server"
-
-import { generateSeed } from "@/lib/generateSeed"
-import { StableCascadeParams } from "@/types"
-
-import { addBase64HeaderToPng } from "../utils/addBase64HeaderToPng"
-
-const gradioApi = `https://jbilcke-hf-stable-cascade-api.hf.space`
-const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`
-
-export async function stableCascade({
-  prompt,
-  negativePrompt,
-  guidanceScale,
-  nbPriorInferenceSteps,
-  nbDecoderInferenceSteps,
-  seed,
-  width,
-  height,
-}: StableCascadeParams): Promise<string> {
-
-  // console.log(`calling `+ gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict")
-
-  // remember: a space needs to be public for the classic fetch() to work
-  const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-      // Authorization: `Bearer ${hfApiToken}`,
-    },
-    body: JSON.stringify({
-      fn_index: 0, // <- is it 0 or 1?
-      data: [
-        microserviceApiKey,
-        prompt,
-        negativePrompt,
-        height,
-        width,
-        guidanceScale,
-        seed || generateSeed(),
-        nbPriorInferenceSteps,
-        nbDecoderInferenceSteps
-      ],
-    }),
-    cache: "no-store",
-    // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
-    // next: { revalidate: 1 }
-  })
-
-  const { data } = await res.json()
-
-  // console.log("data:", data)
-  // Recommendation: handle errors
-  if (res.status !== 200 || !Array.isArray(data)) {
-    // This will activate the closest `error.js` Error Boundary
-    throw new Error(`Failed to fetch data (status: ${res.status})`)
-  }
-  // console.log("data:", data.slice(0, 50))
-
-  const base64Content = (data?.[0] || "") as string
-
-  if (!base64Content) {
-    throw new Error(`invalid response (no content)`)
-  }
-
-  return addBase64HeaderToPng(base64Content)
-}
src/app/server/services/upscale.ts DELETED
@@ -1,105 +0,0 @@
-"use server"
-
-import { generateSeed } from "@/lib/generateSeed"
-import { UpscalingParams } from "@/types"
-
-import { addBase64HeaderToPng } from "../utils/addBase64HeaderToPng"
-
-const gradioApi = `https://jbilcke-hf-image-upscaling-api.hf.space`
-const microserviceApiKey = `${process.env.MICROSERVICE_API_SECRET_TOKEN || ""}`
-
-export async function upscale({
-  imageAsBase64,
-  prompt,
-  negativePrompt,
-  scaleFactor,
-  nbSteps,
-  seed,
-}: UpscalingParams): Promise<string> {
-
-  const addedPrompt = [
-    "clean",
-    "high-resolution",
-    "8k",
-    "best quality",
-    "masterpiece",
-    "crisp",
-    "sharp",
-    "intricate details"
-  ].join(", ")
-
-  const negPrompt = [
-    negativePrompt,
-    "pixelated",
-    "pixels",
-    "noise",
-    "blur",
-    "motion blur",
-    "lowres",
-    "oversmooth",
-    "longbody",
-    "bad anatomy",
-    "bad hands",
-    "missing fingers",
-    "extra digit",
-    "fewer digits",
-    "cropped",
-    "worst quality",
-    "low quality",
-    "artificial",
-    "unrealistic",
-    "watermark",
-    "trademark",
-    "error",
-    "mistake"
-  ].join(", ")
-
-  const conditioningScale = 1.4
-  const classifierFreeGuidance = 9.5
-
-  // remember: a space needs to be public for the classic fetch() to work
-  const res = await fetch(gradioApi + (gradioApi.endsWith("/") ? "" : "/") + "api/predict", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-      // Authorization: `Bearer ${hfApiToken}`,
-    },
-    body: JSON.stringify({
-      fn_index: 0, // <- is it 0 or 1?
-      data: [
-        microserviceApiKey,
-        imageAsBase64, // blob in 'parameter_5' Image component
-        prompt, // string in 'Prompt' Textbox component
-        addedPrompt, // string in 'Added Prompt' Textbox component
-        negPrompt, // string in 'Negative Prompt' Textbox component
-        nbSteps, // number (numeric value between 10 and 50) in 'Denoise Steps' Slider component
-        scaleFactor, // number (numeric value between 1 and 4) in 'Upsample Scale' Slider component
-        conditioningScale, // number (numeric value between 0.5 and 1.5) in 'Conditioning Scale' Slider component
-        classifierFreeGuidance, // number (numeric value between 0.1 and 10.0) in 'Classier-free Guidance' Slider component
-        seed || generateSeed(), // number (numeric value between -1 and 2147483647) in 'Seed' Slider component
-      ],
-    }),
-    cache: "no-store",
-    // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
-    // next: { revalidate: 1 }
-  })
-
-  const { data } = await res.json()
-
-  // Recommendation: handle errors
-  if (res.status !== 200 || !Array.isArray(data)) {
-    // This will activate the closest `error.js` Error Boundary
-    throw new Error(`Failed to fetch data (status: ${res.status})`)
-  }
-  // console.log("data:", data.slice(0, 50))
-
-  const base64Content = (data?.[0] || "") as string
-
-  if (!base64Content) {
-    throw new Error(`invalid response (no content)`)
-  }
-
-  // console.log("upscaling base64Content:", addBase64HeaderToPng(base64Content).slice(0, 50))
-
-  return addBase64HeaderToPng(base64Content)
-}
src/app/server/utils/alphaToWhite.ts DELETED
@@ -1,34 +0,0 @@
-import Jimp from 'jimp';
-
-/**
- * Convert a PNG with an alpha channel to a PNG with a white background
- *
- * this also makes sure the image is 1024x1024, as the segmentation algorithm is finicky
- * and will fail if this is not respected
- * @param dataUri
- * @returns
- */
-export async function alphaToWhite(dataUri: string): Promise<string> {
-
-
-  // strip off the "data:image/png;base64," part
-  const base64Data = dataUri.replace(/^data:image\/\w+;base64,/, "");
-  if (!base64Data) {
-    throw new Error(`invalid image, cannot convert from alpha to white background`)
-  }
-
-  // convert base64 to buffer
-  const imageData = Buffer.from(base64Data, 'base64');
-
-  // read the image using Jimp
-  let img = await Jimp.read(imageData);
-
-  img = img.background(0xFFFFFFFF).resize(1024, 1024);
-
-  return new Promise((resolve, reject) => {
-    img.getBuffer(Jimp.MIME_PNG, (err, buffer) => {
-      if (err) reject(err);
-      else resolve(`data:${Jimp.MIME_PNG};base64,${buffer.toString('base64')}`);
-    });
-  });
-}
src/app/server/utils/segmentToInpaintingMask.ts DELETED
@@ -1,35 +0,0 @@
-"use server"
-
-import Jimp from 'jimp';
-
-/**
- * Converts a segment mask (colored pixels over white)
- * into an inpainting mask (black pixels over white)
- *
- * @param pngBase64
- * @returns
- */
-export async function segmentToInpaintingMask(pngBase64: string) {
-  const black = 0x00000000;
-  const white = 0xFFFFFFFF;
-
-  // strip off the "data:image/png;base64," part
-  const base64Data = pngBase64.replace(/^data:image\/\w+;base64,/, "");
-
-  // convert base64 to buffer
-  const imageData = Buffer.from(base64Data, 'base64');
-
-  // read the image using Jimp
-  const image = await Jimp.read(imageData);
-
-  image.scan(0, 0, image.bitmap.width, image.bitmap.height, (x, y, idx) => {
-    const currentColor = image.getPixelColor(x, y);
-    if (currentColor !== white) {
-      image.bitmap.data[idx] = black;
-    }
-  });
-
-  // get base64 data
-  const base64Image = await image.getBase64Async(Jimp.MIME_PNG);
-  return "data:image/png;base64," + base64Image.split(",")[1];
-}
src/app/server/utils/segmentsToInpaintingMasks.ts DELETED
@@ -1,78 +0,0 @@
-import Jimp from "jimp"
-
-import { SemanticLayer, SemanticLayers } from "@/lib/config"
-
-import { SegmentationResults } from "../segment"
-
-function getEuclideanDistance(color1: number[], color2: number[]): number {
-  return Math.sqrt(
-    Math.pow(color1[0] - color2[0], 2) +
-    Math.pow(color1[1] - color2[1], 2) +
-    Math.pow(color1[2] - color2[2], 2)
-  );
-}
-
-export async function segmentsToInpaintingMasks(segmentationResults: SegmentationResults): Promise<SemanticLayers> {
-  const image = await Jimp.read(Buffer.from(segmentationResults.bitmap.replace(/^data:image\/\w+;base64,/, ""), 'base64'));
-
-  const resultImages: Partial<Record<SemanticLayer, Jimp>> = {}
-  // Convert all result images to base64 strings
-  const base64Images: SemanticLayers = {}
-
-  for (let layer in segmentationResults.data) {
-    resultImages[layer as SemanticLayer] = new Jimp(image)
-    base64Images[layer as SemanticLayer] = ""
-  }
-
-  // Iterate through each pixel in the image
-  image.scan(0, 0, image.bitmap.width, image.bitmap.height, (x, y, idx) => {
-    // Get the color of the current pixel
-    const color = Jimp.intToRGBA(image.getPixelColor(x, y));
-    const currentColor = [color.r / 255, color.g / 255, color.b / 255];
-
-    // Determine which category the color belongs to
-    let minDistance = Infinity;
-    let closestLayer: SemanticLayer | null = null;
-
-    for (let layer in segmentationResults.data) {
-      const layerColor = segmentationResults.data[layer as SemanticLayer]!.color;
-      const distance = getEuclideanDistance(currentColor, layerColor);
-
-      if(distance < minDistance) {
-        minDistance = distance;
-        closestLayer = layer as SemanticLayer;
-      }
-    };
-
-    if (!closestLayer) {
-      return;
-    }
-
-    // Set the color of the pixel in the corresponding result image to black, and white in others
-    for (let layer in resultImages) {
-      // used to guarantee the !.bitmap
-      if (!resultImages[layer as SemanticLayer]?.bitmap) {
-        continue
-      }
-
-      for (let i = 0; i < 4; i++) {
-        if (layer === closestLayer) {
-          if(i < 3)
-            resultImages[layer as SemanticLayer]!.bitmap.data[idx + i] = 0x00; // set rgb channels to black
-          else
-            resultImages[layer as SemanticLayer]!.bitmap.data[idx + i] = 0xFF; // set alpha channel to maximum
-        } else {
-          resultImages[layer as SemanticLayer]!.bitmap.data[idx + i] = 0xFF; // set rgba channels to white
-        }
-      }
-    }
-  });
-
-  // Convert all result images to base64 strings
-  for (let layer in resultImages) {
-    const base64Image = await resultImages[layer as SemanticLayer]!.getBase64Async(Jimp.MIME_PNG);
-    base64Images[layer as SemanticLayer] = "data:image/png;base64," + base64Image.split(",")[1];
-  }
-
-  return base64Images;
-}
src/components/form/input-field.tsx CHANGED
@@ -2,7 +2,7 @@ import { ComponentProps } from "react";
 
 import { Input } from "@/components/ui/input";
 import { Label } from "@/components/ui/label";
-import { cn } from "@/lib/utils";
+import { cn } from "@/lib/utils/cn";
 
 export function InputField({
   label,
src/components/form/select-field.tsx CHANGED
@@ -2,7 +2,7 @@ import { ComponentProps } from "react";
 
 import { Select } from "@/components/ui/select";
 import { Label } from "@/components/ui/label";
-import { cn } from "@/lib/utils";
+import { cn } from "@/lib/utils/cn";
 
 export function SelectField({
   label,
src/components/form/slider-field.tsx CHANGED
@@ -2,7 +2,7 @@ import { ComponentProps } from "react";
 
 import { Slider } from "@/components/ui/slider";
 import { Label } from "@/components/ui/label";
-import { cn } from "@/lib/utils";
+import { cn } from "@/lib/utils/cn";
 
 export function SliderField({
   label,
src/components/form/textarea-field.tsx CHANGED
@@ -2,7 +2,7 @@ import { ComponentProps } from "react";
 
 import { Textarea } from "../ui/textarea";
 import { Label } from "@/components/ui/label";
-import { cn } from "@/lib/utils";
+import { cn } from "@/lib/utils/cn";
 
 export function TextareaField({
   label,
src/components/ui/accordion.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as AccordionPrimitive from "@radix-ui/react-accordion"
 import { ChevronDown } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Accordion = AccordionPrimitive.Root
 
src/components/ui/alert.tsx CHANGED
@@ -1,7 +1,7 @@
 import * as React from "react"
 import { cva, type VariantProps } from "class-variance-authority"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const alertVariants = cva(
   "relative w-full rounded-lg border border-stone-200 p-4 [&:has(svg)]:pl-11 [&>svg+div]:translate-y-[-3px] [&>svg]:absolute [&>svg]:left-4 [&>svg]:top-4 [&>svg]:text-stone-950 dark:border-stone-800 dark:[&>svg]:text-stone-50",
src/components/ui/avatar.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as AvatarPrimitive from "@radix-ui/react-avatar"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Avatar = React.forwardRef<
   React.ElementRef<typeof AvatarPrimitive.Root>,
src/components/ui/badge.tsx CHANGED
@@ -1,7 +1,7 @@
 import * as React from "react"
 import { cva, type VariantProps } from "class-variance-authority"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const badgeVariants = cva(
   "inline-flex items-center rounded-full border border-stone-200 px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-stone-400 focus:ring-offset-2 dark:border-stone-800 dark:focus:ring-stone-800",
src/components/ui/button.tsx CHANGED
@@ -2,7 +2,7 @@ import * as React from "react"
 import { Slot } from "@radix-ui/react-slot"
 import { cva, type VariantProps } from "class-variance-authority"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const buttonVariants = cva(
   "inline-flex items-center justify-center rounded-md text-sm font-medium ring-offset-white transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-stone-400 focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 dark:ring-offset-stone-950 dark:focus-visible:ring-stone-800",
src/components/ui/card.tsx CHANGED
@@ -1,6 +1,6 @@
 import * as React from "react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Card = React.forwardRef<
   HTMLDivElement,
src/components/ui/checkbox.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as CheckboxPrimitive from "@radix-ui/react-checkbox"
 import { Check } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Checkbox = React.forwardRef<
   React.ElementRef<typeof CheckboxPrimitive.Root>,
src/components/ui/command.tsx CHANGED
@@ -5,7 +5,7 @@ import { DialogProps } from "@radix-ui/react-dialog"
 import { Command as CommandPrimitive } from "cmdk"
 import { Search } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 import { Dialog, DialogContent } from "@/components/ui/dialog"
 
 const Command = React.forwardRef<
src/components/ui/dialog.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as DialogPrimitive from "@radix-ui/react-dialog"
 import { X } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Dialog = DialogPrimitive.Root
 
src/components/ui/dropdown-menu.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as DropdownMenuPrimitive from "@radix-ui/react-dropdown-menu"
 import { Check, ChevronRight, Circle } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const DropdownMenu = DropdownMenuPrimitive.Root
 
src/components/ui/input.tsx CHANGED
@@ -1,6 +1,6 @@
 import * as React from "react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 export interface InputProps
   extends React.InputHTMLAttributes<HTMLInputElement> {}
src/components/ui/label.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as LabelPrimitive from "@radix-ui/react-label"
 import { cva, type VariantProps } from "class-variance-authority"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const labelVariants = cva(
   "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70"
src/components/ui/menubar.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as MenubarPrimitive from "@radix-ui/react-menubar"
 import { Check, ChevronRight, Circle } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const MenubarMenu = MenubarPrimitive.Menu
 
src/components/ui/popover.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as PopoverPrimitive from "@radix-ui/react-popover"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Popover = PopoverPrimitive.Root
 
src/components/ui/select.tsx CHANGED
@@ -4,7 +4,7 @@ import * as React from "react"
 import * as SelectPrimitive from "@radix-ui/react-select"
 import { Check, ChevronDown } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Select = SelectPrimitive.Root
 
src/components/ui/separator.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as SeparatorPrimitive from "@radix-ui/react-separator"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Separator = React.forwardRef<
   React.ElementRef<typeof SeparatorPrimitive.Root>,
src/components/ui/slider.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as SliderPrimitive from "@radix-ui/react-slider"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Slider = React.forwardRef<
   React.ElementRef<typeof SliderPrimitive.Root>,
src/components/ui/switch.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as SwitchPrimitives from "@radix-ui/react-switch"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Switch = React.forwardRef<
   React.ElementRef<typeof SwitchPrimitives.Root>,
src/components/ui/table.tsx CHANGED
@@ -1,6 +1,6 @@
 import * as React from "react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Table = React.forwardRef<
   HTMLTableElement,
src/components/ui/tabs.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as TabsPrimitive from "@radix-ui/react-tabs"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const Tabs = TabsPrimitive.Root
 
src/components/ui/textarea.tsx CHANGED
@@ -1,6 +1,6 @@
 import * as React from "react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 export interface TextareaProps
   extends React.TextareaHTMLAttributes<HTMLTextAreaElement> {}
src/components/ui/toast.tsx CHANGED
@@ -3,7 +3,7 @@ import * as ToastPrimitives from "@radix-ui/react-toast"
 import { cva, type VariantProps } from "class-variance-authority"
 import { X } from "lucide-react"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const ToastProvider = ToastPrimitives.Provider
 
src/components/ui/tooltip.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as TooltipPrimitive from "@radix-ui/react-tooltip"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const TooltipProvider = TooltipPrimitive.Provider
 
src/components/ui/vertical-slider.tsx CHANGED
@@ -3,7 +3,7 @@
 import * as React from "react"
 import * as SliderPrimitive from "@radix-ui/react-slider"
 
-import { cn } from "@/lib/utils"
+import { cn } from "@/lib/utils/cn"
 
 const VerticalSlider = React.forwardRef<
   React.ElementRef<typeof SliderPrimitive.Root>,
src/{app/server/utils → lib/base64}/addBase64HeaderToJpeg.ts RENAMED
File without changes
src/{app/server/utils → lib/base64}/addBase64HeaderToPng.ts RENAMED
File without changes
src/lib/base64/blobToDataUri.ts ADDED
@@ -0,0 +1,21 @@
+export async function blobToDataUri(blob: Blob, defaultContentType = ""): Promise<string> {
+  if (typeof window === "undefined") {
+    const arrayBuffer = await blob.arrayBuffer()
+    let buffer = Buffer.from(arrayBuffer)
+    return "data:" + (defaultContentType || blob.type) + ';base64,' + buffer.toString('base64');
+  } else {
+    return new Promise<string>((resolve, reject) => {
+      const reader = new FileReader()
+      reader.onload = _e => {
+        let dataUri = `${reader.result as string || ""}`
+        if (defaultContentType) {
+          dataUri = dataUri.replace("application/octet-stream", defaultContentType)
+        }
+        resolve(dataUri)
+      }
+      reader.onerror = _e => reject(reader.error)
+      reader.onabort = _e => reject(new Error("Read aborted"))
+      reader.readAsDataURL(blob)
+    });
+  }
+}
src/lib/base64/dataUriToBlob.ts ADDED
@@ -0,0 +1,15 @@
+
+export function dataUriToBlob(dataURI = "", defaultContentType = ""): Blob {
+  dataURI = dataURI.replace(/^data:/, '');
+
+  const type = dataURI.match(/(?:image|application|video|audio|text)\/[^;]+/)?.[0] || defaultContentType;
+  const base64 = dataURI.replace(/^[^,]+,/, '');
+  const arrayBuffer = new ArrayBuffer(base64.length);
+  const typedArray = new Uint8Array(arrayBuffer);
+
+  for (let i = 0; i < base64.length; i++) {
+    typedArray[i] = base64.charCodeAt(i);
+  }
+
+  return new Blob([arrayBuffer], { type });
+}
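A short usage sketch for the two helpers above (illustrative only; the sample blob and the logging are not part of the commit):

```typescript
import { blobToDataUri } from "@/lib/base64/blobToDataUri"
import { dataUriToBlob } from "@/lib/base64/dataUriToBlob"

async function demo() {
  const blob = new Blob(["hello clap"], { type: "text/plain" })

  // Blob -> data URI: uses Buffer on the server and FileReader in the browser
  const dataUri = await blobToDataUri(blob)

  // data URI -> Blob: the MIME type is detected from the URI,
  // falling back to the second argument when none is found
  const restored = dataUriToBlob(dataUri, "text/plain")

  console.log(dataUri.slice(0, 30), restored.type)
}
```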
src/lib/clap/clap-specification-draft.md ADDED
@@ -0,0 +1,162 @@
+# CLAP Format Specification
+
+- Status: DRAFT
+- Document revision: 0.0.1
+- Last updated: Feb 6th, 2024
+- Author(s): Julian BILCKE (@flngr)
+
+## BEFORE YOU READ
+
+The CLAP format spec is experimental and not finished yet!
+There might be inconsistencies, unnecessary redundancies or blatant omissions.
+
+## What are CLAP files?
+
+The CLAP format (.clap) is a file format designed for AI video projects.
+
+It preserves prompts and assets into the same container, making it easier to share an AI video project between different people or applications.
+
+## Structure
+
+A CLAP is an array of objects serialized into a YAML text string, then finally compressed using gzip to a binary file.
+
+The file extension is `.clap`
+The mime type is `application/x-yaml`
+
+There can be 5 different types of objects:
+
+- one HEADER
+- one METADATA
+- zero, one or more MODEL(s)
+- zero, one or more SCENE(s)
+- zero, one or more SEGMENT(s)
+
+This can be represented in javascript like this:
+
+```javascript
+[
+  clapHeader, // one metadata object
+  clapMeta, // one metadata object
+  ...clapModels, // optional array of models
+  ...clapScenes, // optional array of scenes
+  ...clapSegments // optional array of segments
+]
+```
+
+## Header
+
+The HEADER provides information about how to decode a CLAP.
+
+Knowing in advance the number of models, scenes and segments helps the decoder parsing the information,
+and in some implementation, help with debugging, logging, and provisioning memory usage.
+
+However in the future, it is possible that a different scheme is used, in order to support streaming.
+
+Either by recognizing the shape of each object (fields), or by using a specific field eg. a `_type`.
+
+```typescript
+{
+  // used to know which format version is used.
+  // CLAP is still in development and the format is not fully specified yet,
+  // during the period most .clap file will have the "clap-0" format
+  format: "clap-0"
+
+  numberOfModels: number // integer
+  numberOfScenes: number // integer
+  numberOfSegments: number // integer
+}
+```
+
+## Metadata
+
+```typescript
+{
+  id: string // "<a valid UUID V4>"
+  title: string // "project title"
+  description: string // "project description"
+  licence: string // "information about licensing"
+
+  // this provides information about the image ratio
+  // this might be removed in the final spec, as this
+  // can be re-computed from width and height
+  orientation: "landscape" | "vertical" | "square"
+
+  // the expected duration of the project
+  durationInMs: number
+
+  // the suggested width and height of the video
+  // note that this is just an indicator,
+  // and might be superseeded by the application reading the .clap file
+  width: number // integer between 256 and 8192 (value in pixels)
+  height: number // integer between 256 and 8192 (value in pixels)
+
+  // name of the suggested video model to use
+  // note that this is just an indicator,
+  // and might be superseeded by the application reading the .clap file
+  defaultVideoModel: string
+
+  // additional prompt to use in the video generation
+  // this helps adding some magic touch and flair to the videos,
+  // but perhaps the field should be renamed
+  extraPositivePrompt: string
+
+  // the screenplay (script) of the video
+  screenplay: string
+
+  // whether to loop the content by default or not
+  isLoop: boolean
+
+  // helper to indicate whether the .clap might contain interactive elements
+  isInteractive: boolean
+}
+```
+
+## Models
+
+Before talking about models, first we should describe the concept of entity:
+
+in a story, an entity is something (person, place, vehicle, animal, robot, alien, object) with a name, a description of the appearance, an age, mileage or quality, an origin, and so on.
+
+An example could be "a giant magical school bus, with appearance of a cat with wheels, and which talks"
+
+The CLAP model would be an instance (an interpretation) of this entity, where we would assign it an identity:
+- a name and age
+- a visual style (a photo of the magic school bus cat)
+- a voice style
+- and maybe other things eg. an origin or background story
+
+As you can see, it can be difficult to create clearly separated categories, like "vehicule", "character", or "location"
+(the magical cat bus could turn into a location in some scene, a speaking character in another etc)
+
+This is why there is a common schema for all models:
+
+```typescript
+{
+  id: string
+  category: ClapSegmentCategory
+  triggerName: string
+  label: string
+  description: string
+  author: string
+  thumbnailUrl: string
+  seed: number
+
+  assetSourceType: ClapAssetSource
+  assetUrl: string
+
+  age: number
+  gender: ClapModelGender
+  region: ClapModelRegion
+  appearance: ClapModelAppearance
+  voiceVendor: ClapVoiceVendor
+  voiceId: string
+}
```
+
+## Atomic types
+
+...
+
+## TO BE CONTINUED
+
+(you can read "./types.ts" for more information)
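The Structure section of the spec above describes a .clap as a YAML array compressed with gzip. The project's own serializeClap / parseClap are referenced elsewhere in this commit but are not among the 50 files shown in this view, so the following is only a hypothetical sketch of that encode/decode round trip, using the `yaml` dependency added in package.json and Node's zlib, with the object typing left loose:

```typescript
// Hypothetical sketch only: the real serializeClap/parseClap are not shown in this view.
import { gzipSync, gunzipSync } from "node:zlib"
import YAML from "yaml"

// [header, metadata, ...models, ...scenes, ...segments] -> YAML text -> gzip -> Blob
function encodeClapArray(entries: object[]): Blob {
  const yamlText = YAML.stringify(entries)
  const compressed = gzipSync(Buffer.from(yamlText, "utf-8"))
  return new Blob([compressed], { type: "application/x-gzip" })
}

// Blob -> gunzip -> YAML text -> array of objects (header first, then metadata, etc.)
async function decodeClapArray(blob: Blob): Promise<object[]> {
  const buffer = Buffer.from(await blob.arrayBuffer())
  const yamlText = gunzipSync(buffer).toString("utf-8")
  return YAML.parse(yamlText)
}
```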
src/lib/clap/clapToDataUri.ts ADDED
@@ -0,0 +1,11 @@
+
+import { blobToDataUri } from "@/lib/base64/blobToDataUri"
+
+import { serializeClap } from "./serializeClap"
+import { ClapProject } from "./types"
+
+export async function clapToDataUri(clap: ClapProject): Promise<string> {
+  const archive = await serializeClap(clap)
+  const dataUri = await blobToDataUri(archive, "application/x-gzip")
+  return dataUri
+}
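Putting the pieces of this commit together, a hedged sketch of the intended flow: generate a clap on the server, then hand it back to the client as a data URI. The wrapper function below is illustrative and not part of the commit; only generateClap and clapToDataUri come from the diffs above.

```typescript
"use server"

import { generateClap } from "@/app/server/aitube/generateClap"
import { clapToDataUri } from "@/lib/clap/clapToDataUri"

// Illustrative wrapper (not in the commit): prompt in, downloadable .clap data URI out
export async function generateClapDataUri(prompt: string): Promise<string> {
  // ask the AiTube API for a story and parse the returned archive
  const clap = await generateClap({ prompt })

  // re-serialize it as a gzip data URI, e.g. to feed a download link on the client
  return clapToDataUri(clap)
}
```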