feat(app): added category and yelan-jp
- app-full.py +83 -94
- config.py +1 -4
- weights/folder_info.json +9 -0
- weights/{ayaka-jp → genshin-impact/ayaka-jp}/added_IVF2202_Flat_nprobe_1.index +0 -0
- weights/{ayaka-jp → genshin-impact/ayaka-jp}/ayaka-jp.pth +0 -0
- weights/{ayaka-jp → genshin-impact/ayaka-jp}/cover.png +0 -0
- weights/{ayato-jp → genshin-impact/ayato-jp}/added_IVF1304_Flat_nprobe_1.index +0 -0
- weights/{ayato-jp → genshin-impact/ayato-jp}/ayato-jp.pth +0 -0
- weights/{ayato-jp → genshin-impact/ayato-jp}/cover.png +0 -0
- weights/{yoimiya-jp/cover.png → genshin-impact/cover.jpg} +2 -2
- weights/{diluc-jp → genshin-impact/diluc-jp}/added_IVF1511_Flat_nprobe_1.index +0 -0
- weights/{diluc-jp → genshin-impact/diluc-jp}/cover.png +0 -0
- weights/{diluc-jp → genshin-impact/diluc-jp}/diluc-jp.pth +0 -0
- weights/{eula-jp → genshin-impact/eula-jp}/added_IVF2219_Flat_nprobe_1.index +0 -0
- weights/{eula-jp → genshin-impact/eula-jp}/cover.png +0 -0
- weights/{eula-jp → genshin-impact/eula-jp}/eula-jp.pth +0 -0
- weights/{fischl-jp → genshin-impact/fischl-jp}/added_IVF1225_Flat_nprobe_1.index +0 -0
- weights/genshin-impact/fischl-jp/cover.png +3 -0
- weights/{fischl-jp → genshin-impact/fischl-jp}/fischl-jp.pth +0 -0
- weights/{ganyu-jp → genshin-impact/ganyu-jp}/added_IVF1636_Flat_nprobe_1.index +0 -0
- weights/{ganyu-jp → genshin-impact/ganyu-jp}/cover.png +0 -0
- weights/{ganyu-jp → genshin-impact/ganyu-jp}/ganyu-jp.pth +0 -0
- weights/{kaeya-jp → genshin-impact/kaeya-jp}/added_IVF1655_Flat_nprobe_1.index +0 -0
- weights/{kaeya-jp → genshin-impact/kaeya-jp}/cover.png +0 -0
- weights/{kaeya-jp → genshin-impact/kaeya-jp}/kaeya-jp.pth +0 -0
- weights/{keqing-jp → genshin-impact/keqing-jp}/added_IVF1634_Flat_nprobe_1.index +0 -0
- weights/{keqing-jp → genshin-impact/keqing-jp}/cover.png +0 -0
- weights/{keqing-jp → genshin-impact/keqing-jp}/keqing-jp.pth +0 -0
- weights/{kokomi-jp → genshin-impact/kokomi-jp}/added_IVF1723_Flat_nprobe_1.index +0 -0
- weights/{kokomi-jp → genshin-impact/kokomi-jp}/cover.png +0 -0
- weights/{kokomi-jp → genshin-impact/kokomi-jp}/kokomi-jp.pth +0 -0
- weights/{model_info.json → genshin-impact/model_info.json} +8 -0
- weights/{nilou-jp → genshin-impact/nilou-jp}/added_IVF2384_Flat_nprobe_1.index +0 -0
- weights/{nilou-jp → genshin-impact/nilou-jp}/cover.png +0 -0
- weights/{nilou-jp → genshin-impact/nilou-jp}/nilou-jp.pth +0 -0
- weights/genshin-impact/yelan-jp/added_IVF2051_Flat_nprobe_1.index +3 -0
- weights/{fischl-jp → genshin-impact/yelan-jp}/cover.png +2 -2
- weights/genshin-impact/yelan-jp/yelan-jp.pth +3 -0
- weights/{yoimiya-jp → genshin-impact/yoimiya-jp}/added_IVF2034_Flat_nprobe_1.index +0 -0
- weights/genshin-impact/yoimiya-jp/cover.png +3 -0
- weights/{yoimiya-jp → genshin-impact/yoimiya-jp}/yoimiya-jp.pth +0 -0
app-full.py
CHANGED
@@ -150,10 +150,21 @@ def change_to_upload_mode(upload_mode):
 if __name__ == '__main__':
     load_hubert()
     models = []
+    categories = []
     tts_voice_list = asyncio.get_event_loop().run_until_complete(edge_tts.list_voices())
     voices = [f"{v['ShortName']}-{v['Gender']}" for v in tts_voice_list]
-    …
-    …
+    with open("weights/folder_info.json", "r", encoding="utf-8") as f:
+        folder_info = json.load(f)
+    for name, info in folder_info.items():
+        if not info['enable']:
+            continue
+        title = name
+        folder = info['folder_path']
+        cover = f"{folder}/{info['cover']}"
+        markdown = info['markdown']
+        categories.append([title, folder, cover, markdown])
+    for (title, folder, cover, markdown) in categories:
+        with open(f"weights/{folder}/model_info.json", "r", encoding="utf-8") as f:
             models_info = json.load(f)
             for name, info in models_info.items():
                 if not info['enable']:
@@ -178,104 +189,82 @@ if __name__ == '__main__':
                 else:
                     net_g = net_g.float()
                 vc = VC(tgt_sr, config)
-    …
-    else:
-        folder_path = "weights"
-        for name in os.listdir(folder_path):
-            print("check folder: " + name)
-            if name.startswith("."): break
-            cover_path = glob.glob(f"{folder_path}/{name}/*.png") + glob.glob(f"{folder_path}/{name}/*.jpg")
-            index_path = glob.glob(f"{folder_path}/{name}/*.index")
-            checkpoint_path = glob.glob(f"{folder_path}/{name}/*.pth")
-            title = name
-            author = ""
-            if cover_path:
-                cover = cover_path[0]
-            else:
-                cover = ""
-            index = index_path[0]
-            cpt = torch.load(checkpoint_path[0], map_location="cpu")
-            tgt_sr = cpt["config"][-1]
-            cpt["config"][-3] = cpt["weight"]["emb_g.weight"].shape[0]  # n_spk
-            if_f0 = cpt.get("f0", 1)
-            if if_f0 == 1:
-                net_g = SynthesizerTrnMs256NSFsid(*cpt["config"], is_half=config.is_half)
-            else:
-                net_g = SynthesizerTrnMs256NSFsid_nono(*cpt["config"])
-            del net_g.enc_q
-            print(net_g.load_state_dict(cpt["weight"], strict=False))  # without this line the weights are not cleaned up properly, oddly enough
-            net_g.eval().to(config.device)
-            if config.is_half:
-                net_g = net_g.half()
-            else:
-                net_g = net_g.float()
-            vc = VC(tgt_sr, config)
+                print(f"Model loaded: {name}")
                 models.append((name, title, author, cover, create_vc_fn(tgt_sr, net_g, vc, if_f0, index)))
     with gr.Blocks() as app:
         gr.Markdown(
             "# <center> RVC Models [(Latest Update)](https://github.com/RVC-Project/Retrieval-based-Voice-Conversion-WebUI/releases/tag/20230428updated)\n"
             "## <center> The input audio should be clean and pure voice without background music.\n"
-            "### <center>
-            "#### <center> Please regenerate your model to latest RVC to fully applied this new rvc.\n"
+            "### <center> This project was inspired by [zomehwh](https://huggingface.co/spaces/zomehwh/rvc-models) and [ardha27](https://huggingface.co/spaces/ardha27/rvc-models)\n"
             "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/110kiMZTdP6Ri1lY9-NbQf17GVPPhHyeT?usp=sharing)\n\n"
             "[![Original Repo](https://badgen.net/badge/icon/github?icon=github&label=Original%20Repo)](https://github.com/RVC-Project/Retrieval-based-Voice-Conversion-WebUI)"
         )
-        …
+        for (title, folder, cover, markdown) in categories:
+            gr.Markdown(
+                '<div align="center">'
+                (f'<img style="width:auto;height:500px;" src="file/{cover}">' if cover else "")+
+                '<div>'
+            )
+            with gr.TabItem(title):
+                with gr.Tabs():
+                    if not models == True:
+                        gr.Markdown("# <center> No Model Loaded.")
+                        return gr.Markdown("## <center> Please added the model or fix your model path.")
+                    for (name, title, author, cover, vc_fn) in models:
+                        with gr.TabItem(name):
+                            with gr.Row():
+                                gr.Markdown(
+                                    '<div align="center">'
+                                    f'<div>{title}</div>\n'+
+                                    (f'<div>Model author: {author}</div>' if author else "")+
+                                    (f'<img style="width:auto;height:300px;" src="file/{cover}">' if cover else "")+
+                                    '</div>'
+                                )
+                            with gr.Row():
+                                with gr.Column():
+                                    vc_youtube = gr.Textbox(label="Youtube URL")
+                                    vc_convert = gr.Button("Convert", variant="primary")
+                                    vc_vocal_preview = gr.Audio(label="Vocal Preview")
+                                    vc_inst_preview = gr.Audio(label="Instrumental Preview")
+                                    vc_audio_preview = gr.Audio(label="Audio Preview")
+                                with gr.Column():
+                                    vc_input = gr.Textbox(label="Input audio path")
+                                    vc_upload = gr.Audio(label="Upload audio file", visible=False, interactive=True)
+                                    upload_mode = gr.Checkbox(label="Upload mode", value=False)
+                                    vc_transpose = gr.Number(label="Transpose", value=0)
+                                    vc_f0method = gr.Radio(
+                                        label="Pitch extraction algorithm, PM is fast but Harvest is better for low frequencies",
+                                        choices=["pm", "harvest"],
+                                        value="pm",
+                                        interactive=True,
+                                    )
+                                    vc_index_ratio = gr.Slider(
+                                        minimum=0,
+                                        maximum=1,
+                                        label="Retrieval feature ratio",
+                                        value=0.6,
+                                        interactive=True,
+                                    )
+                                    tts_mode = gr.Checkbox(label="tts (use edge-tts as input)", value=False)
+                                    tts_text = gr.Textbox(visible=False, label="TTS text (100 words limitation)" if limitation else "TTS text")
+                                    tts_voice = gr.Dropdown(label="Edge-tts speaker", choices=voices, visible=False, allow_custom_value=False, value="en-US-AnaNeural-Female")
+                                    vc_output1 = gr.Textbox(label="Output Message")
+                                    vc_output2 = gr.Audio(label="Output Audio")
+                                    vc_submit = gr.Button("Generate", variant="primary")
+                                with gr.Column():
+                                    vc_volume = gr.Slider(
+                                        minimum=0,
+                                        maximum=10,
+                                        label="Vocal volume",
+                                        value=4,
+                                        interactive=True,
+                                        step=1
+                                    )
+                                    vc_outputCombine = gr.Audio(label="Output Combined Audio")
+                                    vc_combine = gr.Button("Combine", variant="primary")
+                            vc_submit.click(vc_fn, [vc_input, vc_upload, upload_mode, vc_transpose, vc_f0method, vc_index_ratio, tts_mode, tts_text, tts_voice], [vc_output1, vc_output2])
+                            vc_convert.click(cut_vocal_and_inst, vc_youtube, [vc_vocal_preview, vc_inst_preview, vc_audio_preview, vc_input])
+                            vc_combine.click(combine_vocal_and_inst, [vc_output2, vc_volume], vc_outputCombine)
+                            tts_mode.change(change_to_tts_mode, [tts_mode, upload_mode], [vc_input, vc_upload, upload_mode, tts_text, tts_voice])
+                            upload_mode.change(change_to_upload_mode, [upload_mode], [vc_input, vc_upload])
     app.queue(concurrency_count=1, max_size=20, api_open=config.api).launch(share=config.colab)
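In sum, app-full.py drops the old --json flag and folder-scan branch and always resolves models through two registries: weights/folder_info.json lists the category folders, and each category folder carries its own model_info.json. Below is a minimal standalone sketch of that lookup, not the app's actual code: the file names and JSON keys are the ones in this commit, the weights/<category>/<model>/ layout is inferred from the renames further down, and the printout is only illustrative.

```python
import json
import os

def discover_models(weights_dir="weights"):
    """Yield (category, model name, checkpoint, index, cover) via the two-level registries."""
    with open(os.path.join(weights_dir, "folder_info.json"), "r", encoding="utf-8") as f:
        folder_info = json.load(f)
    for category, cat_info in folder_info.items():
        if not cat_info["enable"]:
            continue
        folder = cat_info["folder_path"]
        with open(os.path.join(weights_dir, folder, "model_info.json"), "r", encoding="utf-8") as f:
            models_info = json.load(f)
        for name, info in models_info.items():
            if not info["enable"]:
                continue
            model_dir = os.path.join(weights_dir, folder, name)
            yield (
                category,
                name,
                os.path.join(model_dir, f"{name}.pth"),                      # e.g. weights/genshin-impact/yelan-jp/yelan-jp.pth
                os.path.join(model_dir, info["feature_retrieval_library"]),  # retrieval .index file
                os.path.join(model_dir, info["cover"]),                      # cover image
            )

if __name__ == "__main__":
    for category, name, checkpoint, index, cover in discover_models():
        print(f"[{category}] {name}: {checkpoint}")
```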
config.py
CHANGED
@@ -16,8 +16,7 @@ class Config:
             self.colab,
             self.noparallel,
             self.noautoopen,
-            self.api,
-            self.json
+            self.api
         ) = self.arg_parse()
         self.x_pad, self.x_query, self.x_center, self.x_max = self.device_config()
 
@@ -38,7 +37,6 @@ class Config:
             help="Do not open in browser automatically",
         )
         parser.add_argument('--api', action="store_true", default=False)
-        parser.add_argument("--json", action="store_true", default=False, help="use model_info.json")
         cmd_opts = parser.parse_args()
 
         cmd_opts.port = cmd_opts.port if 0 <= cmd_opts.port <= 65535 else 7865
@@ -50,7 +48,6 @@ class Config:
             cmd_opts.noparallel,
             cmd_opts.noautoopen,
             cmd_opts.api,
-            cmd_opts.json
         )
 
     def device_config(self) -> tuple:
weights/folder_info.json
ADDED
@@ -0,0 +1,9 @@
+{
+    "genshin-impact":{
+        "enable": true,
+        "title": "Genshin Impact",
+        "folder_path": "genshin-impact",
+        "cover": "cover.png",
+        "markdown": ""
+    }
+}
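folder_info.json is the new top-level registry: one object per category with enable, title, folder_path (relative to weights/), cover, and a free-form markdown blurb. Registering another category means adding a sibling entry and shipping a matching weights/<folder_path>/ directory with its own model_info.json. A sketch of that edit; the category name, title, and folder below are placeholders, not part of this commit.

```python
import json

# Hypothetical new category: name, title, and folder are placeholders.
new_entry = {
    "enable": True,
    "title": "Another Game",
    "folder_path": "another-game",  # weights/another-game/ must exist and contain model_info.json
    "cover": "cover.png",
    "markdown": ""
}

with open("weights/folder_info.json", "r", encoding="utf-8") as f:
    folder_info = json.load(f)

folder_info["another-game"] = new_entry

with open("weights/folder_info.json", "w", encoding="utf-8") as f:
    json.dump(folder_info, f, indent=4, ensure_ascii=False)
```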
weights/{ayaka-jp → genshin-impact/ayaka-jp}/added_IVF2202_Flat_nprobe_1.index
RENAMED
File without changes

weights/{ayaka-jp → genshin-impact/ayaka-jp}/ayaka-jp.pth
RENAMED
File without changes

weights/{ayaka-jp → genshin-impact/ayaka-jp}/cover.png
RENAMED
File without changes

weights/{ayato-jp → genshin-impact/ayato-jp}/added_IVF1304_Flat_nprobe_1.index
RENAMED
File without changes

weights/{ayato-jp → genshin-impact/ayato-jp}/ayato-jp.pth
RENAMED
File without changes

weights/{ayato-jp → genshin-impact/ayato-jp}/cover.png
RENAMED
File without changes

weights/{yoimiya-jp/cover.png → genshin-impact/cover.jpg}
RENAMED
File without changes

weights/{diluc-jp → genshin-impact/diluc-jp}/added_IVF1511_Flat_nprobe_1.index
RENAMED
File without changes

weights/{diluc-jp → genshin-impact/diluc-jp}/cover.png
RENAMED
File without changes

weights/{diluc-jp → genshin-impact/diluc-jp}/diluc-jp.pth
RENAMED
File without changes

weights/{eula-jp → genshin-impact/eula-jp}/added_IVF2219_Flat_nprobe_1.index
RENAMED
File without changes

weights/{eula-jp → genshin-impact/eula-jp}/cover.png
RENAMED
File without changes

weights/{eula-jp → genshin-impact/eula-jp}/eula-jp.pth
RENAMED
File without changes

weights/{fischl-jp → genshin-impact/fischl-jp}/added_IVF1225_Flat_nprobe_1.index
RENAMED
File without changes

weights/genshin-impact/fischl-jp/cover.png
ADDED
Git LFS Details

weights/{fischl-jp → genshin-impact/fischl-jp}/fischl-jp.pth
RENAMED
File without changes

weights/{ganyu-jp → genshin-impact/ganyu-jp}/added_IVF1636_Flat_nprobe_1.index
RENAMED
File without changes

weights/{ganyu-jp → genshin-impact/ganyu-jp}/cover.png
RENAMED
File without changes

weights/{ganyu-jp → genshin-impact/ganyu-jp}/ganyu-jp.pth
RENAMED
File without changes

weights/{kaeya-jp → genshin-impact/kaeya-jp}/added_IVF1655_Flat_nprobe_1.index
RENAMED
File without changes

weights/{kaeya-jp → genshin-impact/kaeya-jp}/cover.png
RENAMED
File without changes

weights/{kaeya-jp → genshin-impact/kaeya-jp}/kaeya-jp.pth
RENAMED
File without changes

weights/{keqing-jp → genshin-impact/keqing-jp}/added_IVF1634_Flat_nprobe_1.index
RENAMED
File without changes

weights/{keqing-jp → genshin-impact/keqing-jp}/cover.png
RENAMED
File without changes

weights/{keqing-jp → genshin-impact/keqing-jp}/keqing-jp.pth
RENAMED
File without changes

weights/{kokomi-jp → genshin-impact/kokomi-jp}/added_IVF1723_Flat_nprobe_1.index
RENAMED
File without changes

weights/{kokomi-jp → genshin-impact/kokomi-jp}/cover.png
RENAMED
File without changes

weights/{kokomi-jp → genshin-impact/kokomi-jp}/kokomi-jp.pth
RENAMED
File without changes

weights/{model_info.json → genshin-impact/model_info.json}
RENAMED
@@ -47,6 +47,14 @@
         "feature_retrieval_library": "added_IVF1723_Flat_nprobe_1.index",
         "author":"ArkanDash"
     },
+    "yelan-jp": {
+        "enable": true,
+        "name": "yelan-jp",
+        "title": "Genshin Impact - Yelan",
+        "cover": "cover.png",
+        "feature_retrieval_library": "added_IVF2051_Flat_nprobe_1.index",
+        "author":"ArkanDash"
+    },
     "yoimiya-jp": {
         "enable": true,
         "name": "yoimiya-jp",
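Each model_info.json entry names the files the loader expects inside weights/<category>/<model>/: a <name>.pth checkpoint, the feature_retrieval_library index, and a cover image. The JSON stores only bare file names, so the layout below is inferred from the renames in this commit. A small sketch that checks an enabled entry such as yelan-jp is complete on disk:

```python
import json
import os

def missing_model_files(category_dir):
    """Return paths referenced by model_info.json that are missing under category_dir."""
    with open(os.path.join(category_dir, "model_info.json"), "r", encoding="utf-8") as f:
        models_info = json.load(f)
    missing = []
    for name, info in models_info.items():
        if not info["enable"]:
            continue
        model_dir = os.path.join(category_dir, name)
        for path in (
            os.path.join(model_dir, f"{name}.pth"),
            os.path.join(model_dir, info["feature_retrieval_library"]),
            os.path.join(model_dir, info["cover"]),
        ):
            if not os.path.isfile(path):
                missing.append(path)
    return missing

print(missing_model_files("weights/genshin-impact") or "all referenced files are present")
```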
weights/{nilou-jp → genshin-impact/nilou-jp}/added_IVF2384_Flat_nprobe_1.index
RENAMED
File without changes

weights/{nilou-jp → genshin-impact/nilou-jp}/cover.png
RENAMED
File without changes

weights/{nilou-jp → genshin-impact/nilou-jp}/nilou-jp.pth
RENAMED
File without changes

weights/genshin-impact/yelan-jp/added_IVF2051_Flat_nprobe_1.index
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba725109af57b0c32075582e17571e59ee3a3c33b34b00e13481bd15c0bf0920
+size 84694315

weights/{fischl-jp → genshin-impact/yelan-jp}/cover.png
RENAMED
File without changes

weights/genshin-impact/yelan-jp/yelan-jp.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d62ff825db07022e7d380d13c827f27730c99f6d029e8fb8b1d7cd90018d3c49
+size 55027130
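Both ADDED yelan-jp weight files are committed as Git LFS pointers (a spec version line, a sha256 oid, and a byte size) rather than raw binaries. Once the blobs have been pulled with git lfs, the pointer data can be used to sanity-check the download; a minimal sketch for yelan-jp.pth, reusing the oid and size shown above:

```python
import hashlib
import os

def verify_lfs_blob(path, expected_oid, expected_size):
    """Check a pulled LFS blob against the sha256 oid and size from its pointer file."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# oid and size copied from the yelan-jp.pth pointer above
print(verify_lfs_blob(
    "weights/genshin-impact/yelan-jp/yelan-jp.pth",
    "d62ff825db07022e7d380d13c827f27730c99f6d029e8fb8b1d7cd90018d3c49",
    55027130,
))
```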
weights/{yoimiya-jp → genshin-impact/yoimiya-jp}/added_IVF2034_Flat_nprobe_1.index
RENAMED
File without changes

weights/genshin-impact/yoimiya-jp/cover.png
ADDED
Git LFS Details

weights/{yoimiya-jp → genshin-impact/yoimiya-jp}/yoimiya-jp.pth
RENAMED
File without changes