add playai
- app.py +3 -0
- app_huggingface.py +15 -13
- app_playai.py +7 -0
- requirements.txt +1 -0
app.py
CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 
+from app_playai import demo as demo_playai
 from app_allenai import demo as demo_allenai
 from app_claude import demo as demo_claude
 from app_experimental import demo as demo_experimental
@@ -20,6 +21,8 @@ from app_together import demo as demo_together
 from app_xai import demo as demo_grok
 
 with gr.Blocks(fill_height=True) as demo:
+    with gr.Tab("PlayAI"):
+        demo_playai.render()
     with gr.Tab("Grok"):
         demo_grok.render()
     with gr.Tab("Hyperbolic"):
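Each provider module exposes a top-level `demo` Blocks object, and app.py mounts them side by side with gr.Tab and Blocks.render(). A minimal, self-contained sketch of that pattern follows; build_provider_demo is a hypothetical stand-in for modules like app_playai, not code from this repo.

# Sketch of the tab-per-provider layout used in app.py.
# build_provider_demo is a hypothetical stand-in for the imported app_* modules.
import gradio as gr

def build_provider_demo(name: str) -> gr.Blocks:
    # Each real module builds its own UI and exposes it as `demo`.
    with gr.Blocks() as sub_demo:
        gr.ChatInterface(lambda message, history: f"[{name}] echo: {message}")
    return sub_demo

demo_playai = build_provider_demo("PlayAI")
demo_grok = build_provider_demo("Grok")

with gr.Blocks(fill_height=True) as demo:
    with gr.Tab("PlayAI"):
        demo_playai.render()  # render() embeds an already-built Blocks here
    with gr.Tab("Grok"):
        demo_grok.render()

if __name__ == "__main__":
    demo.launch()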
app_huggingface.py
CHANGED
@@ -1,14 +1,16 @@
 from gradio_client import Client
 import gradio as gr
+import os
+
 
 MODELS = {
     "SmolVLM-Instruct": "akhaliq/SmolVLM-Instruct"
 }
 
 def create_chat_fn(client):
-    def chat(message, history):
+    def chat(message, history, files=[]):
         response = client.predict(
-            message={"text": message, "files":
+            message={"text": message, "files": files},
             system_prompt="You are a helpful AI assistant.",
             temperature=0.7,
             max_new_tokens=1024,
@@ -22,8 +24,8 @@ def create_chat_fn(client):
 
 def set_client_for_session(model_name, request: gr.Request):
     headers = {}
-    if request and hasattr(request, '
-        x_ip_token = request.
+    if request and hasattr(request, 'headers'):
+        x_ip_token = request.headers.get('x-ip-token')
     if x_ip_token:
         headers["X-IP-Token"] = x_ip_token
 
@@ -32,7 +34,11 @@ def set_client_for_session(model_name, request: gr.Request):
 def safe_chat_fn(message, history, client):
     if client is None:
         return "Error: Client not initialized. Please refresh the page."
-
+    try:
+        return create_chat_fn(client)(message, history)
+    except Exception as e:
+        print(f"Error during chat: {str(e)}")
+        return f"Error during chat: {str(e)}"
 
 with gr.Blocks() as demo:
 
@@ -52,22 +58,18 @@ with gr.Blocks() as demo:
     )
 
     # Update client when model changes
-    def update_model(model_name, request):
-        return set_client_for_session(model_name, request)
-
     model_dropdown.change(
-        fn=
+        fn=set_client_for_session,
         inputs=[model_dropdown],
-        outputs=[client]
+        outputs=[client]
     )
 
     # Initialize client on page load
     demo.load(
         fn=set_client_for_session,
-        inputs=gr.State("SmolVLM-Instruct"),
-        outputs=client
+        inputs=[gr.State("SmolVLM-Instruct")],
+        outputs=[client]
    )
 
 demo = demo
 
-demo.launch()
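The wiring that matters here is per-session state: a gradio_client.Client is created once per browser session, stored in gr.State, and passed to the chat function, and the visitor's x-ip-token header is forwarded so the upstream ZeroGPU Space can attribute usage to the visitor rather than to this Space. A condensed sketch of that pattern follows; the /chat endpoint name and its keyword arguments are assumptions about the upstream Space's API, not something this diff confirms.

# Condensed sketch of the per-session client pattern in app_huggingface.py.
# The api_name and predict keyword arguments are assumptions about the upstream API.
import gradio as gr
from gradio_client import Client

MODELS = {"SmolVLM-Instruct": "akhaliq/SmolVLM-Instruct"}

def set_client_for_session(model_name, request: gr.Request):
    headers = {}
    # Forward the visitor's x-ip-token so the upstream ZeroGPU Space can
    # attribute quota to them instead of to this Space.
    if request and request.headers.get("x-ip-token"):
        headers["X-IP-Token"] = request.headers["x-ip-token"]
    return Client(MODELS[model_name], headers=headers)

def safe_chat_fn(message, history, client):
    if client is None:
        return "Error: Client not initialized. Please refresh the page."
    try:
        return client.predict(
            message={"text": message, "files": []},
            system_prompt="You are a helpful AI assistant.",
            temperature=0.7,
            max_new_tokens=1024,
            api_name="/chat",
        )
    except Exception as e:
        return f"Error during chat: {e}"

with gr.Blocks() as demo:
    client = gr.State()  # one Client object per browser session
    gr.ChatInterface(fn=safe_chat_fn, additional_inputs=[client])
    demo.load(fn=set_client_for_session,
              inputs=[gr.State("SmolVLM-Instruct")],
              outputs=[client])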
app_playai.py
ADDED
@@ -0,0 +1,7 @@
+import gradio as gr
+import playai_gradio
+
+demo = gr.load(
+    name='PlayDialog',
+    src=playai_gradio.registry,
+)
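The new file leans on gr.load's support for callable sources: the registry receives the model name (and, in current Gradio versions, an optional token plus any extra kwargs) and returns a ready-made Gradio app. A rough sketch of that contract follows, using a hypothetical fake_registry in place of playai_gradio.registry; the exact arguments Gradio forwards to the callable are an assumption here.

# Rough sketch of the callable-src contract that gr.load relies on in app_playai.py.
# fake_registry is a hypothetical stand-in for playai_gradio.registry.
import gradio as gr

def fake_registry(name: str, token: str | None = None, **kwargs) -> gr.Blocks:
    # A real registry would create a client for `name` (e.g. "PlayDialog")
    # using `token` or an API key from the environment, then build the UI.
    with gr.Blocks() as demo:
        gr.Markdown(f"Loaded model: {name}")
        gr.ChatInterface(lambda message, history: f"[{name}] {message}")
    return demo

demo = gr.load(name="PlayDialog", src=fake_registry)

if __name__ == "__main__":
    demo.launch()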
requirements.txt
CHANGED
@@ -396,3 +396,4 @@ websockets==12.0
     # via gradio-client
 xai-gradio==0.0.2
     # via anychat (pyproject.toml)
+playai-gradio @ git+https://github.com/AK391/playai-gradio.git
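Because playai-gradio is pulled straight from GitHub rather than PyPI, a quick check after `pip install -r requirements.txt` can confirm the dependency resolved and exposes the registry that app_playai.py passes to gr.load. This is a hypothetical smoke test, not part of the commit.

# Hypothetical smoke test: verify the git dependency installed and exposes
# the registry callable used by app_playai.py.
import playai_gradio

assert callable(playai_gradio.registry)
print("playai_gradio OK:", playai_gradio.__name__)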