Spaces:
Runtime error
Runtime error
added backend (redis)
Browse files- db.py +70 -0
- docker-compose.yml +11 -0
- js.py +8 -0
- app.py → main.py +31 -20
- requirements.txt +3 -1
db.py
ADDED
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# https://redis.io/docs/clients/python/
|
2 |
+
from redis import Redis
|
3 |
+
from dotenv import load_dotenv
|
4 |
+
from uuid import uuid4
|
5 |
+
|
6 |
+
load_dotenv()
|
7 |
+
import os
|
8 |
+
import json
|
9 |
+
from redis.commands.json.path import Path
|
10 |
+
|
11 |
+
from typing import TypedDict, List, Tuple, Optional
|
12 |
+
|
13 |
+
|
14 |
+
# Functional TypedDict form — same keys and value types as the class-based
# declaration, so callers and type checkers see an identical shape.
User = TypedDict(
    "User",
    {
        "username": str,  # login name; used as the redis username-index key
        "uid": str,       # uuid4 string, assigned by create_user()
    },
)
|
17 |
+
|
18 |
+
|
19 |
+
# Functional TypedDict form — identical shape to the class-based declaration.
Chat = TypedDict(
    "Chat",
    {
        "patient": str,                     # patient name this chat belongs to
        # Ordered message pairs; presumably (speaker, text) — TODO confirm
        # against the producer in main.py.
        "messages": List[Tuple[str, str]],
    },
)
|
22 |
+
|
23 |
+
|
24 |
+
def get_client() -> Redis:
    """Build a Redis client from the REDIS_HOST / REDIS_PORT env vars.

    Returns:
        A Redis client with ``decode_responses=True`` (str, not bytes).

    Raises:
        KeyError: if REDIS_HOST or REDIS_PORT is not set in the environment.
    """
    # Environment variables are always strings; cast the port to int so the
    # client gets a numeric port rather than relying on implicit coercion.
    return Redis(
        host=os.environ["REDIS_HOST"],
        port=int(os.environ["REDIS_PORT"]),
        decode_responses=True,
    )
|
31 |
+
|
32 |
+
|
33 |
+
def create_user(client: Redis, user: User):
    """Create *user* in redis unless a user with the same username exists.

    Side effects on the success path:
      - mutates the caller's ``user`` dict, setting a freshly generated ``uid``
      - stores the user JSON under ``user:by-uid:<uid>``
      - stores a username -> uid index key
      - initialises an empty chat list under ``chats:by-user-uid:<uid>``

    On a duplicate username, nothing is written; a message is printed.

    NOTE(review): the lookup-then-write sequence is not atomic — two
    concurrent calls for the same username could both pass the lookup. The
    ``nx=True`` flags stop the user keys being overwritten, but the chat
    list is (re)set unconditionally. Confirm whether concurrent
    registration is possible in this app.
    """
    maybe_user = get_user_by_username(client, user["username"])
    if not maybe_user:
        uid = uuid4()
        # Deliberate in-place mutation: the caller's dict carries the uid out.
        user["uid"] = str(uid)
        client.json().set(f"user:by-uid:{uid}", "$", user, nx=True)
        client.set(f"user:uuid:by-username:{user['username']}", str(uid), nx=True)
        # Every new user starts with an empty chat history.
        client.json().set(f"chats:by-user-uid:{uid}", "$", [])
    else:
        print(f"User already existed with username={user['username']}")
|
43 |
+
|
44 |
+
|
45 |
+
def get_user_by_username(client: Redis, username: str) -> Optional[User]:
    """Resolve *username* to its stored user document, or None if unknown.

    Two-step lookup: the ``user:uuid:by-username:<username>`` index key
    yields the uid, then the user JSON is fetched by uid.
    """
    uid = client.get(f"user:uuid:by-username:{username}")
    if uid is None:
        # Unknown username. Previously this fell through and queried the
        # literal key "user:by-uid:None" — a wasted round-trip that relied
        # on that bogus key never existing. Short-circuit instead.
        return None
    return client.json().get(f"user:by-uid:{uid}")
|
49 |
+
|
50 |
+
|
51 |
+
def get_user_chat_by_uid(client: Redis, uid: str) -> List[Chat]:
    """Return the full chat list stored for the user with this uid."""
    key = f"chats:by-user-uid:{uid}"
    return client.json().get(key)
|
54 |
+
|
55 |
+
|
56 |
+
def add_chat_by_uid(client: Redis, chat: Chat, uid: str):
    """Append one chat record to the user's chat list (RedisJSON arrappend)."""
    key = f"chats:by-user-uid:{uid}"
    client.json().arrappend(key, "$", chat)
|
58 |
+
|
59 |
+
|
60 |
+
# if __name__ == "__main__":
|
61 |
+
|
62 |
+
# client = get_client()
|
63 |
+
# user = User(username="foo")
|
64 |
+
# chat = Chat(patient="foo", messages=[("a", "b"), ("c", "d")])
|
65 |
+
|
66 |
+
# create_user(client, user)
|
67 |
+
# user = get_user_by_username(client, user['username'])
|
68 |
+
# add_chat_by_uid(client, chat, "1")
|
69 |
+
# chats = get_user_chat_by_uid(client, "1")
|
70 |
+
# print(chats)
|
docker-compose.yml
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Local development stack: a single redis-stack container. redis-stack
# bundles the JSON module that db.py relies on via client.json().
version: '3'
services:
  redis:
    image: redis/redis-stack:latest
    volumes:
      # Persist redis data on the host so it survives container restarts.
      - ${PWD}/docker-data/redis:/data
    ports:
      - "6379:6379"   # redis server port (REDIS_PORT consumed by db.py)
      - "8001:8001"   # redis-stack's bundled web UI — confirm if unused
    environment:
      # RDB snapshotting: save to disk every 60s if >= 10 keys changed.
      - REDIS_ARGS=--save 60 10
|
js.py
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
get_window_url_params = """
|
2 |
+
function(url_params) {
|
3 |
+
console.log(url_params);
|
4 |
+
const params = new URLSearchParams(window.location.search);
|
5 |
+
url_params = Object.fromEntries(params);
|
6 |
+
return url_params;
|
7 |
+
}
|
8 |
+
"""
|
app.py → main.py
RENAMED
@@ -8,7 +8,7 @@ load_dotenv()
|
|
8 |
|
9 |
from queue import Empty, Queue
|
10 |
from threading import Thread
|
11 |
-
|
12 |
import gradio as gr
|
13 |
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
14 |
from langchain.chat_models import ChatOpenAI
|
@@ -16,6 +16,10 @@ from langchain.prompts import HumanMessagePromptTemplate, SystemMessagePromptTem
|
|
16 |
from langchain.schema import AIMessage, BaseMessage, HumanMessage, SystemMessage
|
17 |
from js import get_window_url_params
|
18 |
from callback import QueueCallback
|
|
|
|
|
|
|
|
|
19 |
from db import (
|
20 |
User,
|
21 |
Chat,
|
@@ -47,6 +51,10 @@ with open("data/patients.json") as f:
|
|
47 |
|
48 |
patients_names = [el["name"] for el in patiens]
|
49 |
|
|
|
|
|
|
|
|
|
50 |
|
51 |
def message_handler(
|
52 |
chat: Optional[ChatOpenAI],
|
@@ -183,28 +191,23 @@ with gr.Blocks(
|
|
183 |
[chat, message, chatbot, messages],
|
184 |
queue=True,
|
185 |
)
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
|
191 |
-
|
192 |
-
|
193 |
-
|
|
|
|
|
|
|
|
|
194 |
done.click(
|
195 |
on_done_click,
|
196 |
[chatbot, patient, user],
|
197 |
[message, chatbot, messages],
|
198 |
)
|
199 |
-
with gr.Row():
|
200 |
-
with gr.Column():
|
201 |
-
clear = gr.Button("Clear")
|
202 |
-
clear.click(
|
203 |
-
on_clear_click,
|
204 |
-
[],
|
205 |
-
[message, chatbot, messages],
|
206 |
-
queue=False,
|
207 |
-
)
|
208 |
with gr.Accordion("Settings", open=False):
|
209 |
model_name = gr.Dropdown(
|
210 |
choices=MODELS_NAMES, value=MODELS_NAMES[0], label="model"
|
@@ -239,5 +242,13 @@ with gr.Blocks(
|
|
239 |
outputs=[patient_card, patient, chatbot, messages],
|
240 |
)
|
241 |
|
242 |
-
|
243 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
8 |
|
9 |
from queue import Empty, Queue
|
10 |
from threading import Thread
|
11 |
+
import os
|
12 |
import gradio as gr
|
13 |
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
14 |
from langchain.chat_models import ChatOpenAI
|
|
|
16 |
from langchain.schema import AIMessage, BaseMessage, HumanMessage, SystemMessage
|
17 |
from js import get_window_url_params
|
18 |
from callback import QueueCallback
|
19 |
+
from fastapi import FastAPI, File, UploadFile, Request
|
20 |
+
from fastapi.responses import HTMLResponse, RedirectResponse
|
21 |
+
from fastapi.staticfiles import StaticFiles
|
22 |
+
from fastapi.templating import Jinja2Templates
|
23 |
from db import (
|
24 |
User,
|
25 |
Chat,
|
|
|
51 |
|
52 |
patients_names = [el["name"] for el in patiens]
|
53 |
|
54 |
+
app = FastAPI()
|
55 |
+
os.makedirs("static", exist_ok=True)
|
56 |
+
app.mount("/static", StaticFiles(directory="static"), name="static")
|
57 |
+
templates = Jinja2Templates(directory="templates")
|
58 |
|
59 |
def message_handler(
|
60 |
chat: Optional[ChatOpenAI],
|
|
|
191 |
[chat, message, chatbot, messages],
|
192 |
queue=True,
|
193 |
)
|
194 |
+
submit = gr.Button("Send Message", variant="primary")
|
195 |
+
submit.click(
|
196 |
+
message_handler,
|
197 |
+
[chat, message, chatbot, messages],
|
198 |
+
[chat, message, chatbot, messages],
|
199 |
+
)
|
200 |
+
|
201 |
+
with gr.Row():
|
202 |
+
with gr.Column():
|
203 |
+
js = "(x) => confirm('Press a button!')"
|
204 |
+
|
205 |
+
done = gr.Button("Done", variant="stop")
|
206 |
done.click(
|
207 |
on_done_click,
|
208 |
[chatbot, patient, user],
|
209 |
[message, chatbot, messages],
|
210 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
211 |
with gr.Accordion("Settings", open=False):
|
212 |
model_name = gr.Dropdown(
|
213 |
choices=MODELS_NAMES, value=MODELS_NAMES[0], label="model"
|
|
|
242 |
outputs=[patient_card, patient, chatbot, messages],
|
243 |
)
|
244 |
|
245 |
+
@app.get("/", response_class=HTMLResponse)
async def home(request: Request):
    """Render the landing page from templates/home.html.

    ``videos`` is always empty here — presumably a placeholder the
    template iterates over; confirm against templates/home.html.
    """
    return templates.TemplateResponse(
        "home.html", {"request": request, "videos": []})
|
249 |
+
|
250 |
+
demo.queue()
|
251 |
+
|
252 |
+
gradio_app = gr.routes.App.create_app(demo)
|
253 |
+
app.mount("/gradio", gradio_app)
|
254 |
+
|
requirements.txt
CHANGED
@@ -2,4 +2,6 @@ gradio
|
|
2 |
openai
|
3 |
elevenlabs
|
4 |
python-dotenv
|
5 |
-
langchain
|
|
|
|
|
|
2 |
openai
|
3 |
elevenlabs
|
4 |
python-dotenv
|
5 |
+
langchain
|
6 |
+
fastapi
|
7 |
+
uvicorn
|