Upload 2 files
Danbooru Prompt Selector/TEST2024/NAIA_generation (WEBUIA_0119).py
ADDED
@@ -0,0 +1,341 @@
import requests
import random
from PIL import Image, ImageOps, ImageTk
from datetime import datetime
import time
from pathlib import Path
import io
import zipfile

# for WebUI
import base64
import re
import json

BASE_URL = "https://api.novelai.net"

def make_turbo_prompt(gen_request):
    lines = gen_request['prompt']
    result = {
        "boys": False,
        "girls": False,
        "1girl": False,
        "1boy": False,
        "1other": False,
        "others": False
    }
    state = {
        "nude,": False,
        "pov,": False,
        "cum,": False,
        "after ": False,
        "pussy juice": False,
        "barefoot": False,
        "breasts": False,
        "ejaculation": False,
    }

    def insert_spaces(source_list, reference_list):
        modified_list = source_list.copy()
        for index, keyword in enumerate(reference_list):
            if keyword not in source_list:
                space_count = len(keyword)  # pad with as many spaces as the keyword is long
                modified_list.insert(index, ' ' * space_count)
        return modified_list

    keywords = gen_request['prompt'].split(', ')
    filtered_keywords = []
    removed_indices = []
    positive0, positive1, positive2, positive3 = gen_request.copy(), gen_request.copy(), gen_request.copy(), gen_request.copy()

    for word in result.keys():
        if word in lines:
            result[word] = True
    for word in state.keys():
        if word in gen_request['prompt']:
            state[word] = True

    key_index = int((len(keywords) / 2) - 1)

    if result["1boy"] or result["boys"]:
        if result["1girl"]:
            if 'sex,' in gen_request['prompt']:
                sex_pos_keywords = ['stomach bulge', 'insertion', 'fucked silly', 'x-ray', 'orgasm', 'cross-section', 'uterus', 'overflow', 'rape', 'vaginal', 'anal']
                facial_keywords = ['tongue', 'ahegao']
                temp_sex_pos = []
                temp_facial = []
                cum_events = []
                explicit_check = []
                if 'open mouth' in keywords: keywords.remove('open mouth')
                if 'closed mouth' in keywords: keywords.remove('closed mouth')
                if 'after rape' in keywords:
                    keywords.remove('after rape')
                    explicit_check.append('after rape')
                for keyword in keywords:
                    if (('sex' not in keyword and 'cum' not in keyword and 'ejaculation' not in keyword
                         and 'vaginal' not in keyword and 'penetration' not in keyword)
                            and all(sex_pos not in keyword for sex_pos in sex_pos_keywords)
                            and all(facial not in keyword for facial in facial_keywords)):
                        filtered_keywords.append(keyword)
                    elif 'sex' in keyword:
                        removed_indices.append(keyword)
                    elif 'penetration' in keyword:
                        removed_indices.append(keyword)
                    elif 'cum' in keyword and keyword != 'cum':
                        cum_events.append(keyword)
                    elif any(sex_pos in keyword for sex_pos in sex_pos_keywords):
                        for sex_pos in sex_pos_keywords:
                            if sex_pos in keyword:
                                temp_sex_pos.append(sex_pos)
                    elif any(facial in keyword for facial in facial_keywords):
                        for facial in facial_keywords:
                            if facial in keyword:
                                temp_facial.append(facial)
                filtered_keywords.insert(int((len(filtered_keywords) / 2) - 1), ' no penetration, imminent penetration')
                filtered_keywords_positive0 = filtered_keywords.copy()
                filtered_keywords.remove(' no penetration, imminent penetration')
                # 0: imminent penetration, imminent sex
                for i, keyword in enumerate(filtered_keywords):
                    if 'pantyhose' in keyword:
                        filtered_keywords[i] = 'torn ' + filtered_keywords[i]
                # 1: default
                key_index = int((len(filtered_keywords) / 2) - 1)
                if 'pussy' in filtered_keywords: key_index = filtered_keywords.index('pussy')
                if 'penis' in filtered_keywords: key_index = filtered_keywords.index('penis')
                filtered_keywords[key_index:key_index] = ['motion lines', 'surprised']
                for keyword in removed_indices:
                    if 'cum' not in keyword and 'ejaculation' not in keyword:
                        filtered_keywords.insert(key_index, keyword)
                if temp_sex_pos: filtered_keywords[key_index:key_index] = temp_sex_pos
                if 'clothed sex' in filtered_keywords and 'bottomless' not in filtered_keywords: filtered_keywords.insert(filtered_keywords.index('clothed sex') + 1, 'bottomless')
                pos1_copied_keywords = filtered_keywords.copy()
                for i, keyword in enumerate(pos1_copied_keywords):
                    if 'closed eyes' in keyword:
                        rand_num = random.randint(0, 2)
                        if rand_num == 0: pos1_copied_keywords[i] = 'half-' + pos1_copied_keywords[i]
                        elif rand_num == 1 and 'closed eyes' in pos1_copied_keywords:
                            pos1_copied_keywords.remove('closed eyes')
                            filtered_keywords[i] = 'half-closed eyes'
                filtered_keywords_positive1 = pos1_copied_keywords.copy()
                # 2: ejaculation, cum in pussy
                key_index = filtered_keywords.index('surprised')
                filtered_keywords.remove('surprised')
                filtered_keywords[key_index:key_index] = ["ejaculation", "cum"]
                for keyword in removed_indices:
                    if 'cum' in keyword:
                        filtered_keywords.insert(key_index, keyword)
                if temp_facial: filtered_keywords[key_index:key_index] = temp_facial
                filtered_keywords_positive2 = filtered_keywords.copy()
                # 3: after sex, after ejaculation
                for i, keyword in enumerate(filtered_keywords):
                    if 'closed eyes' in keyword:
                        rand_num = random.randint(0, 2)
                        if rand_num == 0 and filtered_keywords[i] != 'half-closed eyes': filtered_keywords[i] = 'half-' + filtered_keywords[i]
                        elif rand_num == 1: filtered_keywords[i] = 'empty eyes'
                        else: filtered_keywords[i] = 'empty eyes, half-closed eyes'
                if 'sex' in filtered_keywords:
                    key_index = filtered_keywords.index('sex')
                elif 'group sex' in filtered_keywords:
                    key_index = filtered_keywords.index('group sex')
                filtered_keywords.remove('ejaculation')
                filtered_keywords[key_index:key_index] = ['cum drip', 'erection'] + cum_events
                if explicit_check: filtered_keywords[key_index:key_index] = explicit_check
                if 'sex' in filtered_keywords and 'group sex' not in filtered_keywords:
                    if 'pussy' in filtered_keywords and 'anal' not in filtered_keywords: filtered_keywords.insert(filtered_keywords.index('sex') + 1, 'after vaginal, spread pussy')
                    elif 'anal' in filtered_keywords: filtered_keywords.insert(filtered_keywords.index('sex') + 1, 'after anus, cum in ass')
                    filtered_keywords.insert(filtered_keywords.index('sex'), 'after sex')
                    filtered_keywords.remove('sex')
                elif 'group sex' in filtered_keywords:
                    if 'vaginal' in filtered_keywords and 'anal' not in filtered_keywords:
                        filtered_keywords.insert(filtered_keywords.index('group sex') + 1, 'after vaginal, spread pussy')
                        if 'multiple penises' in filtered_keywords: filtered_keywords.insert(filtered_keywords.index('group sex') + 3, 'cum on body, bukkake')
                    elif 'anal' in filtered_keywords:
                        filtered_keywords.insert(filtered_keywords.index('group sex') + 1, 'after anus, cum in ass')
                        if 'multiple penises' in filtered_keywords: filtered_keywords.insert(filtered_keywords.index('group sex') + 3, 'cum on body, bukkake')
                    else: filtered_keywords.insert(filtered_keywords.index('group sex') + 1, 'cum on body, {bukkake}')
                temp_post_keyword = []
                for keyword in sex_pos_keywords:
                    if not (keyword == 'orgasm' or keyword == 'overflow'):
                        if keyword in filtered_keywords:
                            temp_post_keyword.append(keyword)
                for keyword in temp_post_keyword:
                    filtered_keywords.remove(keyword)

                positive0['prompt'] = ', '.join(insert_spaces(filtered_keywords_positive0, filtered_keywords)).strip()
                positive1['prompt'] = ', '.join(insert_spaces(filtered_keywords_positive1, filtered_keywords)).strip()
                positive2['prompt'] = ', '.join(insert_spaces(filtered_keywords_positive2, filtered_keywords)).strip()
                positive3['prompt'] = ', '.join(filtered_keywords).strip()
    positive0["type"] = "turbo"
    positive1["type"] = "turbo"
    positive2["type"] = "turbo"
    positive3["type"] = "turbo"
    return positive0, positive1, positive2, positive3

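# Usage note (illustrative): for a request whose prompt contains e.g. "1boy, 1girl, sex, pussy",
# the four returned copies carry staged prompts (imminent -> during -> ejaculation -> aftermath)
# and are tagged "type": "turbo"; prompts that do not match the 1boy/1girl + "sex," pattern come
# back unchanged apart from the "type" field.
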
def generate_image_NAI(access_token, prompt, model, action, parameters):
    data = {
        "input": prompt,
        "model": model,
        "action": action,
        "parameters": parameters,
    }

    response = requests.post(f"{BASE_URL}/ai/generate-image", json=data, headers={"Authorization": f"Bearer {access_token}"})
    # Errors are not raised here; the raw response body is returned so the caller can inspect it.
    return response.content

# WebUI
def convert_prompt(prompt):
    return (prompt.replace('(', '\\(').replace(')', '\\)')  # (tag) to \(tag\)
            .replace('{{', '(').replace('}}', ')').replace('{', '(').replace('}', ')')  # {{{tag}}} to ((tag))
            .replace('[[', '[').replace(']]', ']'))  # [[[tag]]] to [[tag]]

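# Illustrative example (hypothetical tags): convert_prompt("{{smile}}, artist:foo (bar)")
# yields "(smile), artist:foo \(bar\)" -- NovelAI {curly} emphasis becomes WebUI (paren)
# emphasis, and literal parentheses inside tags are escaped first so they survive the swap.
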
def generate_image_webui(access_token, prompt, model, action, parameters):
    samplers = {
        "k_euler": "Euler",
        "k_euler_ancestral": "Euler a",
        "k_dpmpp_2s_ancestral": "DPM++ 2S a",
        "k_dpmpp_sde": "DPM++ SDE"
    }

    # matching data format
    data = {
        "input": prompt,
        "model": model,
        "action": action,
        "parameters": parameters,
    }

    params = {
        "prompt": convert_prompt(data['input']),
        "negative_prompt": convert_prompt(data['parameters']['negative_prompt']),
        "steps": data['parameters']['steps'],
        "width": data['parameters']['width'],
        "height": data['parameters']['height'],
        "cfg_scale": data['parameters']['scale'],
        "sampler_index": samplers[data['parameters']['sampler']],
        "seed": data['parameters']['seed'],
        "seed_resize_from_h": -1,
        "seed_resize_from_w": -1,
        "denoising_strength": None,
        "n_iter": "1",
        "batch_size": data['parameters']['n_samples']
    }

    if data['parameters']['enable_hr'] == True:
        params['enable_hr'] = True
        params["hr_upscaler"] = data['parameters']["hr_upscaler"]
        params["hr_scale"] = data['parameters']["hr_scale"]
        params["hr_second_pass_steps"] = data['parameters']["hr_second_pass_steps"]
        params["denoising_strength"] = data['parameters']["denoising_strength"]

    res = requests.post(f"{access_token}/sdapi/v1/txt2img", json=params)
    imageb64s = res.json()['images']
    content = None
    for b64 in imageb64s:
        # n_samples is 1 here, so only the single (last) image is kept
        img = b64.encode()
        content = base64.b64decode(img)

    s = io.BytesIO()
    zf = zipfile.ZipFile(s, "w")
    zf.writestr("generated", content)
    zf.close()
    return s.getvalue()

# WebUI
def generate_image(access_token, prompt, model, action, parameters):
    if re.match(r'^http[s]?://', access_token):
        return generate_image_webui(**locals())
    return generate_image_NAI(**locals())

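# Backend selection note: access_token doubles as the backend switch. A value starting with
# http:// or https:// is treated as a WebUI base URL (for example "http://127.0.0.1:7860",
# hypothetical) and sent to /sdapi/v1/txt2img; anything else is used as a NovelAI Bearer token.
# Either way the caller receives ZIP bytes whose first entry is the generated PNG.
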
def generate(gen_request):

    params = {
        "legacy": False,
        "quality_toggle": True if gen_request["quality_toggle"] == 1 else False,
        "width": gen_request["width"],
        "height": gen_request["height"],
        "n_samples": 1,
        "seed": gen_request["seed"],
        "extra_noise_seed": random.randint(0, 9999999999),
        "sampler": gen_request["sampler"],
        "steps": 28 if (gen_request["type"] != "upper" or "steps" not in gen_request) else gen_request["steps"],
        "scale": gen_request["scale"],
        "uncond_scale": 1.0,
        "negative_prompt": gen_request["negative"],
        "sm": gen_request["sema"],
        "sm_dyn": gen_request["sema_dyn"],
        "decrisper": False,
        "controlnet_strength": 1.0,
        "add_original_image": False,
        "cfg_rescale": gen_request["cfg_rescale"],
        "noise_schedule": "native",
        "enable_hr": gen_request["enable_hr"]
    }

    if params["enable_hr"] == True:
        params["hr_upscaler"] = gen_request["hr_upscaler"]
        params["hr_scale"] = gen_request["hr_scale"]
        params["hr_second_pass_steps"] = gen_request["hr_second_pass_steps"]
        params["denoising_strength"] = gen_request["denoising_strength"]

    # TODO: wildcard feature still needs to be implemented
    positive = gen_request["prompt"]

    filename_rule = gen_request["png_rule"]
    save_folder = gen_request["save_folder"]

    access_token = gen_request["access_token"]
    additional_folder = ""

    def resize_and_fill(image, max_size=None):
        if max_size is None:
            max_size = gen_request["user_screen_size"]
        original_width, original_height = image.size
        if original_width > max_size or original_height > max_size:
            # Resize while keeping the aspect ratio
            image.thumbnail((max_size, max_size))

            # Compute the new image size and letterbox it onto a black square
            width, height = image.size
            new_image = Image.new("RGB", (max_size, max_size), "black")
            new_image.paste(image, ((max_size - width) // 2, (max_size - height) // 2))
            return new_image
        else:
            return image

    def log_error(e, output_file_path="output_file_path"):
        # Get the current time
        current_time = datetime.now().strftime("%m/%d %H:%M:%S")

        # Error log message
        error_message = f"#### Error occurred at {current_time} ####\nError: {e}\n############################################\n"

        # Append the message to error_log.txt (output_file_path is currently unused)
        with open("error_log.txt", "a") as file:
            file.write(error_message)

    zipped_bytes = None
    try:
        zipped_bytes = generate_image(access_token, positive, "nai-diffusion-3", "generate", params)
        if gen_request["png_rule"] == "count":
            additional_folder = "/" + gen_request["start_time"]
        if gen_request["type"] == "turbo":
            additional_folder += "/turbo"
        d = Path(save_folder + additional_folder)
        d.mkdir(parents=True, exist_ok=True)
        zipped = zipfile.ZipFile(io.BytesIO(zipped_bytes))
        image_bytes = zipped.read(zipped.infolist()[0])
        if gen_request["png_rule"] == "count":
            _count = gen_request["count"]
            filename = (d / f"{_count:05}.png")
        else:
            filename = (d / f"{datetime.now().strftime('%Y%m%d_%H%M%S')}.png")
        filename.write_bytes(image_bytes)
        i = Image.open(io.BytesIO(image_bytes))
        i = ImageOps.exif_transpose(i).convert("RGB")
        i_resized = resize_and_fill(i)
        #tk_image = ImageTk.PhotoImage(i_resized)
        return i_resized, positive, params['seed'], i.info, str(filename)
    except Exception as e:
        try:
            if zipped_bytes is None:
                raise ValueError("Connection broken (Protocol Error)")
            error_message = zipped_bytes.decode('utf-8')[2:-2]
        except Exception as inner_exception:
            error_message = str(inner_exception)
        log_error(error_message, "path_to_output_folder")
        return None, error_message, params['seed'], None, None
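
# A minimal smoke-test sketch (all values below are hypothetical; supply a real NovelAI token
# or a running WebUI base URL such as "http://127.0.0.1:7860" in "access_token" before use).
if __name__ == "__main__":
    sample_request = {
        "prompt": "1girl, solo, looking at viewer",
        "negative": "lowres, bad anatomy",
        "quality_toggle": 1,
        "width": 832,
        "height": 1216,
        "seed": random.randint(0, 9999999999),
        "sampler": "k_euler_ancestral",
        "scale": 5.0,
        "cfg_rescale": 0.0,
        "sema": False,
        "sema_dyn": False,
        "enable_hr": False,
        "type": "normal",
        "png_rule": "datetime",
        "save_folder": "output",
        "user_screen_size": 768,
        "access_token": "PASTE-TOKEN-OR-WEBUI-URL-HERE",
    }
    image, prompt_used, seed, info, saved_path = generate(sample_request)
    print(prompt_used, seed, saved_path)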
Danbooru Prompt Selector/TEST2024/NAIA_random_function_core (WEBUIA_0119).py
ADDED
@@ -0,0 +1,107 @@
import pandas
import numpy as np

def find_keyword_index(general):
    # Check whether the leading tags contain a boys/girls count tag and remember its index
    boys = ["1boy", "2boys", "3boys", "4boys", "5boys", "6+boys"]
    girls = ["1girl", "2girls", "3girls", "4girls", "5girls", "6+girls"]
    #others = ["1other", "2others", "3others", "4others", "5others", "6+others"]
    boys_indices = [i for i, item in enumerate(general[:6]) if item in boys]
    girls_indices = [i for i, item in enumerate(general[:6]) if item in girls]

    # Case 1 and case 2: a girls-count tag is present
    if girls_indices:
        return girls_indices[0] + 1

    # Case 3: a boys-count tag is present
    if boys_indices:
        return boys_indices[0] + 1

    # Case 4: neither is present
    return 2

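# Illustrative example: find_keyword_index(["1girl", "solo", "smile"]) returns 1 (the slot
# right after the first girl-count tag); with no count tag among the first six entries it
# falls back to index 2.
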
def RFP(popped_row, fix_prompt, after_prompt, auto_hide_prompt, rm_a, rm_s, rm_c, nsfw, data, magic_word):
    boys = ["1boy", "2boys", "3boys", "4boys", "5boys", "6+boys"]
    girls = ["1girl", "2girls", "3girls", "4girls", "5girls", "6+girls"]
    general = [item.strip() for item in popped_row['general'].split(',')]
    if nsfw == 1:
        nsfw_word = []
        for keyword in general:
            if keyword in data.qe_word or keyword in data.bag_of_tags or "horns" in keyword or "(" in keyword or keyword in boys or keyword in girls:
                nsfw_word.append(keyword)
        general = nsfw_word
    if rm_c == 1:
        temp_general = []
        for keyword in general:
            if keyword in data.bag_of_tags:
                temp_general.append(keyword)
        for keyword in temp_general:
            general.remove(keyword)
    fix = [item.strip() for item in fix_prompt[:-1].split(',')]
    if rm_a == 0:
        if popped_row['artist']:
            artists = [item.strip() for item in popped_row['artist'].split(',')]
            for i, keyword in enumerate(artists):
                if '(' in keyword and ')' in keyword:
                    artists[i] = keyword.replace('(', '\\(').replace(')', '\\)')
            artist = ["artist:" + _artist for _artist in artists]
            fix = fix + artist
    if rm_s == 0:
        if popped_row['copyright']:
            series = [item.strip() for item in popped_row['copyright'].split(',')]
            for i, keyword in enumerate(series):
                if '(' in keyword and ')' in keyword:
                    series[i] = keyword.replace('(', '\\(').replace(')', '\\)')
        else:
            series = []
        fix = fix + series
    after = [item.strip() for item in after_prompt[:-1].split(',')]
    auto_hide = [item.strip() for item in auto_hide_prompt[:-1].split(',')] + ["| |", ":|", "\\||/", "<|> <|>", "|| ||", ";|"]
    fix_index = find_keyword_index(general)
    processed = general.copy()
    temp_hide_prompt = []
    processed[fix_index:fix_index] = fix
    processed += after
    for keyword in processed:
        if keyword in auto_hide:
            temp_hide_prompt.append(keyword)
    for keyword in temp_hide_prompt:
        processed.remove(keyword)

    if rm_c == 0:
        if popped_row['character']:
            character = [item.strip() for item in popped_row['character'].split(',')]
            for i, keyword in enumerate(character):
                if '(' in keyword and ')' in keyword:
                    character[i] = keyword.replace('(', '\\(').replace(')', '\\)')
            processed[fix_index:fix_index] = character
            fix_index += len(character)
    if magic_word["random_artist"] == True:
        processed.insert(fix_index, magic_word["random_artist_name"])

    boy_in_processed = girl_in_processed = None
    for boy in boys:
        if boy in processed:
            boy_in_processed = boy
            break

    for girl in girls:
        if girl in processed:
            girl_in_processed = girl
            break

    if boy_in_processed and girl_in_processed:
        boy_index = processed.index(boy_in_processed)
        girl_index = processed.index(girl_in_processed)
        if boy_index > girl_index:
            processed.pop(boy_index)
            processed.insert(girl_index, boy_in_processed)
    if popped_row['rating'] == 'e':
        rating_text = ', rating: explicit, nsfw'
    elif popped_row['rating'] == 'q':
        rating_text = ', rating: questionable, nsfw'
    elif popped_row['rating'] == 's':
        rating_text = ', rating: sensitive'
    else:
        rating_text = ', rating: general'
    return 'best quality, masterpiece, ' + ', '.join(processed) + rating_text
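
# A minimal smoke-test sketch with hypothetical inputs: "data" normally comes from the main
# application and only needs the qe_word / bag_of_tags collections used above.
if __name__ == "__main__":
    from types import SimpleNamespace
    dummy_data = SimpleNamespace(qe_word=[], bag_of_tags=[])
    dummy_row = {"general": "1girl, solo, smile", "artist": "", "copyright": "",
                 "character": "", "rating": "s"}
    dummy_magic = {"random_artist": False, "random_artist_name": ""}
    print(RFP(dummy_row, "masterpiece,", "looking at viewer,", "bad tag,",
              0, 0, 0, 0, dummy_data, dummy_magic))
    # Expected output: "best quality, masterpiece, 1girl, masterpiece, solo, smile,
    # looking at viewer, rating: sensitive"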