Upload 2 files
app.py
CHANGED
@@ -5,10 +5,9 @@ import numpy as np
 # DiffuseCraft
 from dc import (infer, _infer, pass_result, get_diffusers_model_list, get_samplers,
     get_vaes, enable_model_recom_prompt, enable_diffusers_model_detail, extract_exif_data, esrgan_upscale, UPSCALER_KEYS,
-    preset_quality, preset_styles, process_style_prompt …
-    …
-    …
-    select_civitai_lora, search_civitai_lora_json)
+    preset_quality, preset_styles, process_style_prompt, get_all_lora_tupled_list, update_loras, apply_lora_prompt,
+    download_my_lora, search_civitai_lora, update_civitai_selection, select_civitai_lora, search_civitai_lora_json)
+from modutils import get_t2i_model_info, get_civitai_tag, CIVITAI_SORT, CIVITAI_PERIOD, CIVITAI_BASE_MODEL
 # Translator
 from llmdolphin import (dolphin_respond_auto, dolphin_parse_simple,
     get_llm_formats, get_dolphin_model_format, get_dolphin_models,
@@ -142,7 +141,7 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
                     lora5_md = gr.Markdown(value="", visible=False)
                 with gr.Accordion("From URL", open=True, visible=True):
                     with gr.Row():
-                        lora_search_civitai_basemodel = gr.CheckboxGroup(label="Search LoRA for", choices= …
+                        lora_search_civitai_basemodel = gr.CheckboxGroup(label="Search LoRA for", choices=CIVITAI_BASE_MODEL, value=["Pony", "SDXL 1.0"])
                         lora_search_civitai_sort = gr.Radio(label="Sort", choices=CIVITAI_SORT, value="Highest Rated")
                         lora_search_civitai_period = gr.Radio(label="Period", choices=CIVITAI_PERIOD, value="AllTime")
                     with gr.Row():
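The hunks above only show the new search widgets and imports, not how they are bound to the handlers in dc.py. The following is a minimal wiring sketch, not part of the commit: every component name in it (query_box, result_dd, desc_md, gallery, lora_url, and so on) is a hypothetical stand-in, and only the imported handlers and the CIVITAI_* constants come from the diff. It merely illustrates the event flow implied by the handlers' return signatures.

# Hypothetical wiring sketch; component names are assumptions, not from the diff.
import gradio as gr
from dc import search_civitai_lora, update_civitai_selection, select_civitai_lora
from modutils import CIVITAI_SORT, CIVITAI_PERIOD, CIVITAI_BASE_MODEL

with gr.Blocks() as demo:
    query_box = gr.Textbox(label="Query")
    basemodel = gr.CheckboxGroup(label="Search LoRA for", choices=CIVITAI_BASE_MODEL, value=["Pony", "SDXL 1.0"])
    sort = gr.Radio(label="Sort", choices=CIVITAI_SORT, value="Highest Rated")
    period = gr.Radio(label="Period", choices=CIVITAI_PERIOD, value="AllTime")
    search_btn = gr.Button("Search on Civitai")
    result_dd = gr.Dropdown(label="Search results", choices=[("", "")], value="", visible=False, allow_custom_value=True)
    desc_md = gr.Markdown(value="", visible=False)
    gallery = gr.Gallery(label="Results", columns=5)
    lora_url = gr.Textbox(label="LoRA download URL")

    # search_civitai_lora returns five updates: result dropdown, description
    # markdown, two visibility toggles (their real targets are not visible in
    # the hunks; the button and query box stand in for them here), and the
    # gallery value.
    search_btn.click(
        search_civitai_lora,
        inputs=[query_box, basemodel, sort, period],
        outputs=[result_dd, desc_md, search_btn, query_box, gallery],
    )
    # Clicking a thumbnail maps the selected gallery index back to the dropdown
    # value via the module-level civitai_last_choices cache in dc.py.
    gallery.select(update_civitai_selection, None, [result_dd])
    # Picking a dropdown entry fills the URL box and shows the cached markdown.
    result_dd.change(select_civitai_lora, [result_dd], [lora_url, desc_md])

demo.launch()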
dc.py
CHANGED
@@ -783,7 +783,7 @@ from PIL import Image
 import random, json
 from modutils import (safe_float, escape_lora_basename, to_lora_key, to_lora_path,
     get_local_model_list, get_private_lora_model_lists, get_valid_lora_name,
-    get_valid_lora_path, get_valid_lora_wt, get_lora_info,
+    get_valid_lora_path, get_valid_lora_wt, get_lora_info, CIVITAI_SORT, CIVITAI_PERIOD,
     normalize_prompt_list, get_civitai_info, search_lora_on_civitai, translate_to_en)

 sd_gen = GuiSD()
@@ -893,35 +893,6 @@ def enable_diffusers_model_detail(is_enable: bool = False, model_name: str = "")
     return gr.update(value=is_enable), gr.update(value=new_value, choices=get_diffusers_model_list())


-def get_t2i_model_info(repo_id: str):
-    from huggingface_hub import HfApi
-    api = HfApi()
-    try:
-        if " " in repo_id or not api.repo_exists(repo_id): return ""
-        model = api.model_info(repo_id=repo_id)
-    except Exception as e:
-        print(f"Error: Failed to get {repo_id}'s info. {e}")
-        return ""
-    if model.private or model.gated: return ""
-    tags = model.tags
-    info = []
-    url = f"https://huggingface.co/{repo_id}/"
-    if not 'diffusers' in tags: return ""
-    if 'diffusers:FluxPipeline' in tags:
-        info.append("FLUX.1")
-    elif 'diffusers:StableDiffusionXLPipeline' in tags:
-        info.append("SDXL")
-    elif 'diffusers:StableDiffusionPipeline' in tags:
-        info.append("SD1.5")
-    if model.card_data and model.card_data.tags:
-        info.extend(list_sub(model.card_data.tags, ['text-to-image', 'stable-diffusion', 'stable-diffusion-api', 'safetensors', 'stable-diffusion-xl']))
-    info.append(f"DLs: {model.downloads}")
-    info.append(f"likes: {model.likes}")
-    info.append(model.last_modified.strftime("lastmod: %Y-%m-%d"))
-    md = f"Model Info: {', '.join(info)}, [Model Repo]({url})"
-    return gr.update(value=md)
-
-
 def load_model_prompt_dict():
     import json
     dict = {}
@@ -1209,30 +1180,46 @@ def update_loras(prompt, lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora
     gr.update(value=tag5, label=label5, visible=on5), gr.update(visible=on5), gr.update(value=md5, visible=on5)


-def search_civitai_lora(query, base_model, sort= …
-    global …
-    … (removed lines truncated in the diff view)
+def search_civitai_lora(query, base_model=[], sort=CIVITAI_SORT[0], period=CIVITAI_PERIOD[0], tag="", user="", gallery=[]):
+    global civitai_last_results, civitai_last_choices, civitai_last_gallery
+    civitai_last_choices = [("", "")]
+    civitai_last_gallery = []
+    civitai_last_results = {}
+    items = search_lora_on_civitai(query, base_model, 100, sort, period, tag, user)
     if not items: return gr.update(choices=[("", "")], value="", visible=False),\
-        gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True)
-
+        gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
+    civitai_last_results = {}
     choices = []
+    gallery = []
     for item in items:
         base_model_name = "Pony🐴" if item['base_model'] == "Pony" else item['base_model']
         name = f"{item['name']} (for {base_model_name} / By: {item['creator']} / Tags: {', '.join(item['tags'])})"
         value = item['dl_url']
         choices.append((name, value))
-
+        gallery.append((item['img_url'], name))
+        civitai_last_results[value] = item
     if not choices: return gr.update(choices=[("", "")], value="", visible=False),\
-        gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True)
-
-    … (removed lines truncated in the diff view)
+        gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
+    civitai_last_choices = choices
+    civitai_last_gallery = gallery
+    result = civitai_last_results.get(choices[0][1], "None")
     md = result['md'] if result else ""
     return gr.update(choices=choices, value=choices[0][1], visible=True), gr.update(value=md, visible=True),\
-        gr.update(visible=True), gr.update(visible=True)
+        gr.update(visible=True), gr.update(visible=True), gr.update(value=gallery)
+
+
+def update_civitai_selection(evt: gr.SelectData):
+    try:
+        selected_index = evt.index
+        selected = civitai_last_choices[selected_index][1]
+        return gr.update(value=selected)
+    except Exception:
+        return gr.update(visible=True)


 def select_civitai_lora(search_result):
     if not "http" in search_result: return gr.update(value=""), gr.update(value="None", visible=True)
-    result = …
+    result = civitai_last_results.get(search_result, "None")
     md = result['md'] if result else ""
     return gr.update(value=search_result), gr.update(value=md, visible=True)
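The rewritten search_civitai_lora only touches a handful of keys on each item returned by search_lora_on_civitai. The sketch below spells out that assumed item shape as inferred from the fields the new handler reads; all values are made-up placeholders, not real Civitai data.

# Assumed shape of one search_lora_on_civitai result (keys taken from the hunk
# above); every value here is a placeholder for illustration only.
example_item = {
    "name": "Example LoRA",                 # shown in the dropdown label
    "creator": "example-user",              # "By: ..." part of the label
    "tags": ["style", "concept"],           # joined into the label
    "base_model": "Pony",                   # rendered as "Pony🐴" in the label
    "dl_url": "https://civitai.com/api/download/models/…",  # dropdown value and cache key
    "img_url": "https://image.civitai.com/…",                # gallery thumbnail
    "md": "Markdown description shown next to the results",  # info panel text
}

# How the handler consumes it (mirrors the added lines in the hunk above):
choices = [(f"{example_item['name']} (for {example_item['base_model']} / "
            f"By: {example_item['creator']} / Tags: {', '.join(example_item['tags'])})",
            example_item["dl_url"])]
gallery = [(example_item["img_url"], choices[0][0])]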