broadfield committed
Update app.py
app.py
CHANGED
@@ -173,7 +173,7 @@ def build_space(repo_name,file_name,file_content,access_token=""):
         return [{'role':'assistant','content': 'There was an Error: ' + str(e)}]
 
 
-def agent(prompt_in,history,mod=2):
+def agent(prompt_in,history,mod=2,tok_in=""):
     print(prompt_in)
     print('mod ',mod)
     in_data=[None,None,None,None,None,]
@@ -237,7 +237,7 @@ def agent(prompt_in,history,mod=2):
     ret1,ret2 = parse_json(out_w[2].split('<|im_end|>')[0])
     print('ret1',ret1)
     print('ret2',ret2)
-    build_out = build_space(repo_,ret1,ret2)
+    build_out = build_space(repo_,ret1,ret2,access_token=tok_in)
     history+=[{'role':'system','content':f'observation:{build_out}'}]
     yield history
 
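The token entered in the new HF TOKEN box is threaded through agent() into build_space(), whose body is outside this diff. A rough sketch of what that function plausibly does with access_token, assuming it uses huggingface_hub and the community-pool namespace that appears in the iframe URL later in this diff (the repo_id, success message, and create/upload calls are illustrative, not the actual implementation):

# Hypothetical sketch only -- the real build_space() body is not shown in this commit.
from huggingface_hub import HfApi

def build_space(repo_name, file_name, file_content, access_token=""):
    try:
        api = HfApi(token=access_token)          # token typed into the HF TOKEN textbox
        repo_id = f"community-pool/{repo_name}"  # assumed namespace (from the iframe URL)
        api.create_repo(repo_id=repo_id, repo_type="space",
                        space_sdk="gradio", exist_ok=True)
        api.upload_file(path_or_fileobj=file_content.encode(),
                        path_in_repo=file_name,
                        repo_id=repo_id, repo_type="space")
        return [{'role': 'assistant', 'content': f'Built space {repo_id}'}]
    except Exception as e:
        # this error path is the one visible at line 173 above
        return [{'role': 'assistant', 'content': 'There was an Error: ' + str(e)}]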
@@ -272,6 +272,7 @@ with gr.Blocks() as ux:
     chatbot=gr.Chatbot(type='messages',show_label=False, show_share_button=False, show_copy_button=True, layout="panel")
     prompt=gr.MultimodalTextbox(label="Prompt",file_count="multiple", file_types=["image"])
     mod_c=gr.Dropdown(choices=[n['name'] for n in clients],value='Qwen/Qwen2.5-Coder-32B-Instruct',type='index')
+    tok_in=gr.Textbox(label='HF TOKEN')
     #chat_ux=gr.ChatInterface(fn=agent,chatbot=chatbot,additional_inputs=[mod_c]).load()
     #chat_ux.additional_inputs=[mod_c]
     #chat_ux.load()
@@ -282,9 +283,10 @@ with gr.Blocks() as ux:
     with gr.Row(visible=False):
         stt=gr.Textbox()
     with gr.Column():
-
-
-
+        gr.HTML()
+        #html_view=gr.HTML("""<iframe src='https://huggingface.co/spaces/community-pool/test1/tree/main' height='1000' width='200'>Viewer Space</iframe>""")
+    sub_b = submit_b.click(agent, [prompt,chatbot,mod_c,tok_in],chatbot)
+    sub_p = prompt.submit(agent, [prompt,chatbot,mod_c,tok_in],chatbot)
     stop_b.click(None,None,None, cancels=[sub_b,sub_p])
 ux.queue(default_concurrency_limit=20).launch(max_threads=40)
 
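The wiring in the last hunk follows Gradio's usual pattern: every component in the inputs list is passed positionally to the handler, so appending tok_in is what delivers the token to agent(), and keeping the returned event handles lets the stop button cancel them. A minimal, self-contained sketch of that pattern with a stub handler and simplified components (not the real app.py):

# Minimal sketch of the inputs-list / cancels pattern used above (stub handler).
import gradio as gr

def agent(prompt_in, history, tok_in=""):
    # stand-in generator for the real agent(); just echoes whether a token was supplied
    history = (history or []) + [{'role': 'assistant',
                                  'content': f'prompt={prompt_in!r}, token set: {bool(tok_in)}'}]
    yield history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type='messages')
    prompt = gr.Textbox(label='Prompt')
    tok_in = gr.Textbox(label='HF TOKEN')
    submit_b = gr.Button('Send')
    stop_b = gr.Button('Stop')

    # tok_in's current value becomes the third positional argument of agent()
    sub_b = submit_b.click(agent, [prompt, chatbot, tok_in], chatbot)
    sub_p = prompt.submit(agent, [prompt, chatbot, tok_in], chatbot)
    stop_b.click(None, None, None, cancels=[sub_b, sub_p])  # as in the commit

demo.queue().launch()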