akhaliq (HF staff) committed
Commit ae9601e
1 Parent(s): 9f7482c

add huggingface working version

Files changed (2)
  1. app.py +3 -0
  2. app_huggingface.py +11 -4
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 
+from app_huggingface import demo as demo_huggingface
 from app_playai import demo as demo_playai
 from app_allenai import demo as demo_allenai
 from app_claude import demo as demo_claude
@@ -22,6 +23,8 @@ from app_xai import demo as demo_grok
 from app_showui import demo as demo_showui
 
 with gr.Blocks(fill_height=True) as demo:
+    with gr.Tab("Huggingface"):
+        demo_huggingface.render()
     with gr.Tab("Fal"):
         demo_fal.render()
     gr.Markdown("This app is built with gradio, check out gradio github and star: <a href='https://github.com/gradio-app/gradio'>Gradio <img src='https://img.shields.io/github/stars/gradio-app/gradio'></a>.")
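For context, app.py composes several independently defined Gradio apps as tabs of one parent Blocks via .render(). A minimal, self-contained sketch of that pattern (the sub-app names below are illustrative, not the modules in this repo):

import gradio as gr

# Each sub-app defines its own Blocks without launching it.
with gr.Blocks() as demo_a:
    gr.Markdown("Sub-app A")

with gr.Blocks() as demo_b:
    gr.Markdown("Sub-app B")

# The parent app renders each sub-app inside its own tab.
with gr.Blocks(fill_height=True) as demo:
    with gr.Tab("A"):
        demo_a.render()
    with gr.Tab("B"):
        demo_b.render()

if __name__ == "__main__":
    demo.launch()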
app_huggingface.py CHANGED
@@ -1,4 +1,4 @@
-from gradio_client import Client
+from gradio_client import Client, handle_file
 import gradio as gr
 import os
 
@@ -8,9 +8,16 @@ MODELS = {
 }
 
 def create_chat_fn(client):
-    def chat(message, history, files=[]):
+    def chat(message, history):
+        # Extract text and files from the message
+        text = message.get("text", "")
+        files = message.get("files", [])
+
+        # Handle file uploads if present
+        processed_files = [handle_file(f) for f in files]
+
         response = client.predict(
-            message={"text": message, "files": files},
+            message={"text": text, "files": processed_files},
             system_prompt="You are a helpful AI assistant.",
             temperature=0.7,
             max_new_tokens=1024,
@@ -71,5 +78,5 @@ with gr.Blocks() as demo:
         outputs=[client]
     )
 
-demo = demo
+demo.launch()
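For reference, handle_file() from gradio_client wraps a local path or URL so it is uploaded to the remote Space rather than passed through as a plain string, which is what makes the multimodal message dict above work. A minimal sketch of calling a hosted chat endpoint this way (the Space id and api_name are placeholders, not the actual values from this repo's MODELS dict):

from gradio_client import Client, handle_file

# Placeholder Space id; the real ids live in the MODELS dict of app_huggingface.py.
client = Client("some-user/some-multimodal-chat-space")

# handle_file() marks a local path (or URL) for upload to the remote Space
# instead of sending the raw string as text.
message = {
    "text": "What is in this image?",
    "files": [handle_file("example.png")],
}

# The keyword arguments and api_name depend on the target Space's API;
# these mirror the call made in create_chat_fn above.
response = client.predict(
    message=message,
    system_prompt="You are a helpful AI assistant.",
    temperature=0.7,
    max_new_tokens=1024,
    api_name="/chat",
)
print(response)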