fffiloni commited on
Commit
bc95dd0
•
1 Parent(s): dfc461b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -12
app.py CHANGED
@@ -6,7 +6,7 @@ lpmc_client = gr.load("seungheondoh/LP-Music-Caps-demo", src="spaces")
6
 
7
  from gradio_client import Client
8
 
9
- client = Client("https://fffiloni-test-llama-api.hf.space/", hf_token=hf_token)
10
  lyrics_client = Client("https://fffiloni-music-to-lyrics.hf.space/")
11
  visualizer_client = Client("https://fffiloni-animated-audio-visualizer.hf.space/")
12
 
@@ -139,6 +139,7 @@ def infer(audio_file, has_lyrics):
139
  gr.Info("Calling Llama2 ...")
140
  result = client.predict(
141
  llama_q, # str in 'Message' Textbox component
 
142
  api_name="/predict"
143
  )
144
 
@@ -146,25 +147,25 @@ def infer(audio_file, has_lyrics):
146
 
147
  print(f"Llama2 result: {result}")
148
 
149
- gr.Info("Prompt Optimization ...")
150
- get_shorter_prompt = f"""
151
- From this image description, please provide a short but efficient summary for a good Stable Diffusion prompt:
152
- '{result}'
153
- """
154
 
155
- shorten = client.predict(
156
- get_shorter_prompt, # str in 'Message' Textbox component
157
- api_name="/predict"
158
- )
159
 
160
- print(f'SHORTEN PROMPT: {shorten}')
161
 
162
  # ———
163
  print("""———
164
  Calling SD-XL ...
165
  """)
166
  gr.Info('Calling SD-XL ...')
167
- prompt = shorten
168
  conditioning, pooled = compel(prompt)
169
  images = pipe(prompt_embeds=conditioning, pooled_prompt_embeds=pooled).images[0]
170
 
 
6
 
7
  from gradio_client import Client
8
 
9
+ client = Client("https://fffiloni-test-llama-api-debug.hf.space/", hf_token=hf_token)
10
  lyrics_client = Client("https://fffiloni-music-to-lyrics.hf.space/")
11
  visualizer_client = Client("https://fffiloni-animated-audio-visualizer.hf.space/")
12
 
 
139
  gr.Info("Calling Llama2 ...")
140
  result = client.predict(
141
  llama_q, # str in 'Message' Textbox component
142
+ "M2I",
143
  api_name="/predict"
144
  )
145
 
 
147
 
148
  print(f"Llama2 result: {result}")
149
 
150
+ #gr.Info("Prompt Optimization ...")
151
+ #get_shorter_prompt = f"""
152
+ #From this image description, please provide a short but efficient summary for a good Stable Diffusion prompt:
153
+ #'{result}'
154
+ #"""
155
 
156
+ #shorten = client.predict(
157
+ # get_shorter_prompt, # str in 'Message' Textbox component
158
+ # api_name="/predict"
159
+ #)
160
 
161
+ #print(f'SHORTEN PROMPT: {shorten}')
162
 
163
  # ———
164
  print("""———
165
  Calling SD-XL ...
166
  """)
167
  gr.Info('Calling SD-XL ...')
168
+ prompt = result
169
  conditioning, pooled = compel(prompt)
170
  images = pipe(prompt_embeds=conditioning, pooled_prompt_embeds=pooled).images[0]
171