Update
models.py
CHANGED
@@ -66,6 +66,13 @@ def get_cached_model(
     )
     return model, params_cpu
 
+def pil_image_to_base64(image: PIL.Image.Image) -> str:
+    """Converts PIL image to base64."""
+    import io
+    import base64
+    buffered = io.BytesIO()
+    image.save(buffered, format='JPEG')
+    return base64.b64encode(buffered.getvalue()).decode('utf-8')
 
 def generate(
     model_name: str, sampler: str, image: PIL.Image.Image, prompt: str
@@ -90,8 +97,16 @@ def generate(
         n_ubatch=512,
         n_batch=512,
     )
-    print(prompt)
     return model.create_chat_completion(messages=[{
         "role": "user",
-        "content":
+        "content": [
+            {
+                "type": "text",
+                "text": prompt
+            },
+            {
+                "type": "image_url",
+                "image_url": "data:image/jpeg;base64," + pil_image_to_base64(image)
+            }
+        ]
     }])["choices"][0]["message"]["content"]
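For context, a minimal sketch of how the new helper and the multimodal message payload fit together. It assumes, as the diff implies, that model is a llama-cpp-python Llama instance with a vision-capable chat handler whose create_chat_completion accepts OpenAI-style content lists; the stand-in image and prompt below are illustrative only and not part of the change.

# Minimal sketch, assuming llama-cpp-python with a vision chat handler.
# The test image and prompt are placeholders, not from the diff.
import base64
import io

import PIL.Image


def pil_image_to_base64(image: PIL.Image.Image) -> str:
    """Converts PIL image to base64 (same helper as added in the diff)."""
    buffered = io.BytesIO()
    image.save(buffered, format='JPEG')  # JPEG encoding expects an RGB image
    return base64.b64encode(buffered.getvalue()).decode('utf-8')


image = PIL.Image.new('RGB', (64, 64), color='red')  # stand-in image
prompt = 'Describe this image.'                      # stand-in prompt

# Text and image are sent as separate content parts; the image travels as a
# base64 data URL, matching the structure introduced in generate().
messages = [{
    "role": "user",
    "content": [
        {"type": "text", "text": prompt},
        {
            "type": "image_url",
            "image_url": "data:image/jpeg;base64," + pil_image_to_base64(image),
        },
    ],
}]

# With a vision-capable model loaded, the reply text would then come from:
# model.create_chat_completion(messages=messages)["choices"][0]["message"]["content"]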