Update app.py
app.py CHANGED
```diff
@@ -6,7 +6,7 @@ lpmc_client = gr.load("seungheondoh/LP-Music-Caps-demo", src="spaces")
 
 from gradio_client import Client
 
-client = Client("https://fffiloni-test-llama-api.hf.space/", hf_token=hf_token)
+client = Client("https://fffiloni-test-llama-api-debug.hf.space/", hf_token=hf_token)
 lyrics_client = Client("https://fffiloni-music-to-lyrics.hf.space/")
 visualizer_client = Client("https://fffiloni-animated-audio-visualizer.hf.space/")
 
@@ -139,6 +139,7 @@ def infer(audio_file, has_lyrics):
     gr.Info("Calling Llama2 ...")
     result = client.predict(
                     llama_q,    # str in 'Message' Textbox component
+                    "M2I",
                     api_name="/predict"
     )
 
@@ -146,25 +147,25 @@ def infer(audio_file, has_lyrics):
 
     print(f"Llama2 result: {result}")
 
-    gr.Info("Prompt Optimization ...")
-    get_shorter_prompt = f"""
-    From this image description, please provide a short but efficient summary for a good Stable Diffusion prompt:
-    '{result}'
-    """
+    #gr.Info("Prompt Optimization ...")
+    #get_shorter_prompt = f"""
+    #From this image description, please provide a short but efficient summary for a good Stable Diffusion prompt:
+    #'{result}'
+    #"""
 
-    shorten = client.predict(
-                    get_shorter_prompt,    # str in 'Message' Textbox component
-                    api_name="/predict"
-    )
+    #shorten = client.predict(
+    #                get_shorter_prompt,    # str in 'Message' Textbox component
+    #                api_name="/predict"
+    #)
 
-    print(f'SHORTEN PROMPT: {shorten}')
+    #print(f'SHORTEN PROMPT: {shorten}')
 
     # βββ
     print("""βββ
     Calling SD-XL ...
     """)
     gr.Info('Calling SD-XL ...')
-    prompt = shorten
+    prompt = result
     conditioning, pooled = compel(prompt)
     images = pipe(prompt_embeds=conditioning, pooled_prompt_embeds=pooled).images[0]
 
```
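For context, here is a minimal sketch of how the two changed call sites fit together after this commit: the Llama2 Space is now called through its debug URL with an extra positional input ("M2I") on the /predict endpoint, and, with the prompt-optimization block commented out, the raw Llama2 answer is fed straight into Compel/SD-XL. The SD-XL/Compel setup, the base model id, and the example llama_q text are illustrative assumptions, not taken from the diff; hf_token is assumed to come from the environment.

```python
import os

import torch
from gradio_client import Client
from diffusers import StableDiffusionXLPipeline
from compel import Compel, ReturnedEmbeddingsType

hf_token = os.environ.get("HF_TOKEN")  # assumed: token provided via env, as a Space secret would be

# Debug Llama2 Space introduced by this commit
client = Client("https://fffiloni-test-llama-api-debug.hf.space/", hf_token=hf_token)

# Illustrative question; in app.py, llama_q is built from the music caption / lyrics
llama_q = "Here is a music caption. Describe a single image that matches it: ..."

result = client.predict(
    llama_q,   # str in 'Message' Textbox component
    "M2I",     # second positional input added by this commit (assumed to select a music-to-image mode)
    api_name="/predict",
)

# Illustrative SD-XL + Compel setup (app.py builds its own `pipe` and `compel` elsewhere)
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
).to("cuda")
compel = Compel(
    tokenizer=[pipe.tokenizer, pipe.tokenizer_2],
    text_encoder=[pipe.text_encoder, pipe.text_encoder_2],
    returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED,
    requires_pooled=[False, True],
)

# With the prompt-optimization block commented out, the raw Llama2 answer is the prompt
prompt = result
conditioning, pooled = compel(prompt)  # per-token embeddings + pooled embedding for SD-XL
image = pipe(prompt_embeds=conditioning, pooled_prompt_embeds=pooled).images[0]
```

Note that Compel truncates prompts to the CLIP token window by default, so passing the full Llama2 answer instead of a shortened summary relies on the answer fitting (or on truncation being acceptable) now that the summarization step is disabled.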