Make lorax openai endpoint working (#755)
make base model for lorax inference working
Co-authored-by: Nathan Sarrazin <sarrazin.nathan@gmail.com>
- src/lib/server/models.ts +1 -1
src/lib/server/models.ts
CHANGED
@@ -22,7 +22,7 @@ const modelConfig = z.object({
|
|
22 |
/** Used as an identifier in DB */
|
23 |
id: z.string().optional(),
|
24 |
/** Used to link to the model page, and for inference */
|
25 |
-
name: z.string(),
|
26 |
displayName: z.string().min(1).optional(),
|
27 |
description: z.string().min(1).optional(),
|
28 |
websiteUrl: z.string().url().optional(),
|
|
|
22 |
/** Used as an identifier in DB */
|
23 |
id: z.string().optional(),
|
24 |
/** Used to link to the model page, and for inference */
|
25 |
+
name: z.string().default(""),
|
26 |
displayName: z.string().min(1).optional(),
|
27 |
description: z.string().min(1).optional(),
|
28 |
websiteUrl: z.string().url().optional(),
|