use disk storage instead of cache
- .gitignore +2 -1
- routes/prompt/index.js +39 -14
.gitignore
CHANGED
@@ -56,4 +56,5 @@ profile-*
 profile*
 *clinic*
 *flamegraph*
-.env
+.env
+uploads/*
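Before the routes diff below, note how the on-disk cache key is derived: the raw prompt is reduced to a filename-safe slug by two regex passes. A small worked example (the sample prompt is illustrative only; the regexes are the ones from the new handler):

  const inputs = 'a lighthouse at dawn, landscape-4_3';
  // Strip characters outside [a-zA-Z0-9-_ ], then turn spaces into dashes.
  const slug = inputs.replace(/[^a-zA-Z0-9-_ ]/g, "").replace(/ /g, "-");
  // slug === "a-lighthouse-at-dawn-landscape-4_3"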
routes/prompt/index.js
CHANGED
@@ -1,44 +1,69 @@
 'use strict'
 
-const nodeCache = require('node-cache');
 const dotenv = require('dotenv');
+const fs = require('fs').promises;
+
 const HfInference = require('@huggingface/inference').HfInference;
 
 dotenv.config();
 
 const inference = new HfInference(process.env.HF_TOKEN);
-const cache = new nodeCache(
-  {
-    stdTTL: 60 * 60 * 24,
-    checkperiod: 60 * 60,
-    useClones: false
-  }
-);
 
 const REPO_NAME = "black-forest-labs/FLUX.1-schnell"
+const IMAGE_SIZES = {
+  "square": {
+    height: 512,
+    width: 512
+  },
+  "portrait-3_4": {
+    height: 512,
+    width: 384
+  },
+  "portrait-9_16": {
+    height: 512,
+    width: 288
+  },
+  "landscape-4_3": {
+    height: 384,
+    width: 512
+  },
+  "landscape-16-9": {
+    height: 288,
+    width: 512
+  }
+}
 
 module.exports = async function (fastify, opts) {
   fastify.get('/:inputs', async function (request, reply) {
     const { inputs } = request.params;
-    const slug = inputs.replace(/[^a-zA-Z0-9]/g, '');
 
-
-
-
+    const slug = inputs.replace(/[^a-zA-Z0-9-_ ]/g, "").replace(/ /g, "-");
+
+    const file = await fs.readFile(process.env.PUBLIC_FILE_UPLOAD_DIR + "/" + slug + ".png")?.catch(() => null)
+    if (file) {
       return reply
         .header('Content-Type', 'image/jpeg')
-        .send(
+        .send(file);
     }
 
+    let size = inputs.split(" ").find(i => Object.keys(IMAGE_SIZES).includes(i)) || "square";
+    const { height, width } = IMAGE_SIZES[size];
+
     const hfRequest = await inference.textToImage({
       inputs,
       model: REPO_NAME,
+      parameters: {
+        height,
+        width
+      }
     })
 
     const buffer = await hfRequest.arrayBuffer();
     const array = new Uint8Array(buffer);
 
-
+    const dir = await fs.opendir(process.env.PUBLIC_FILE_UPLOAD_DIR).catch(() => null)
+    if (!dir) await fs.mkdir(process.env.PUBLIC_FILE_UPLOAD_DIR)
+    await fs.writeFile(process.env.PUBLIC_FILE_UPLOAD_DIR + "/" + slug + ".png", array)
 
     return reply
       .header('Content-Type', 'image/jpeg')