Spaces:
Sleeping
Sleeping
jonathanjordan21
committed on
Commit
•
ecec9fc
1
Parent(s):
2843be3
Update apis/chat_api.py
Browse files- apis/chat_api.py +27 -0
apis/chat_api.py
CHANGED
@@ -25,6 +25,7 @@ from networks.huggingface_streamer import HuggingfaceStreamer
|
|
25 |
from networks.huggingchat_streamer import HuggingchatStreamer
|
26 |
from networks.openai_streamer import OpenaiStreamer
|
27 |
|
|
|
28 |
|
29 |
class ChatAPIApp:
|
30 |
def __init__(self):
|
@@ -35,6 +36,10 @@ class ChatAPIApp:
|
|
35 |
version=CONFIG["version"],
|
36 |
)
|
37 |
self.setup_routes()
|
|
|
|
|
|
|
|
|
38 |
|
39 |
def get_available_models(self):
|
40 |
return {"object": "list", "data": AVAILABLE_MODELS_DICTS}
|
@@ -136,6 +141,22 @@ class ChatAPIApp:
|
|
136 |
except Exception as e:
|
137 |
raise HTTPException(status_code=500, detail=str(e))
|
138 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
139 |
def get_readme(self):
|
140 |
readme_path = Path(__file__).parents[1] / "README.md"
|
141 |
with open(readme_path, "r", encoding="utf-8") as rf:
|
@@ -163,6 +184,12 @@ class ChatAPIApp:
|
|
163 |
summary="Chat completions in conversation session",
|
164 |
include_in_schema=include_in_schema,
|
165 |
)(self.chat_completions)
|
|
|
|
|
|
|
|
|
|
|
|
|
166 |
self.app.get(
|
167 |
"/readme",
|
168 |
summary="README of HF LLM API",
|
|
|
25 |
from networks.huggingchat_streamer import HuggingchatStreamer
|
26 |
from networks.openai_streamer import OpenaiStreamer
|
27 |
|
28 |
+
from sentence_transformers import SentenceTransformer
|
29 |
|
30 |
class ChatAPIApp:
|
31 |
def __init__(self):
|
|
|
36 |
version=CONFIG["version"],
|
37 |
)
|
38 |
self.setup_routes()
|
39 |
+
self.embeddings = {
|
40 |
+
"mxbai-embed-large":SentenceTransformer("mixedbread-ai/mxbai-embed-large-v1"),
|
41 |
+
"nomic-embed-text": SentenceTransformer("nomic-ai/nomic-embed-text-v1.5", trust_remote_code=True)
|
42 |
+
}
|
43 |
|
44 |
def get_available_models(self):
|
45 |
return {"object": "list", "data": AVAILABLE_MODELS_DICTS}
|
|
|
141 |
except Exception as e:
|
142 |
raise HTTPException(status_code=500, detail=str(e))
|
143 |
|
144 |
+
|
145 |
+
class EmbeddingRequest(BaseModel):
    """Request body for the ``/api/embeddings`` endpoint."""

    # Name of the embedding model to use; presumably a key of
    # ChatAPIApp.self.embeddings (e.g. "mxbai-embed-large") — TODO confirm.
    model: str
    # Text to be embedded.
    prompt: str
    # Optional extra keyword arguments for the encoder; assumed to be
    # forwarded to SentenceTransformer.encode — verify against caller.
    options: Optional[dict] = None
|
149 |
+
|
150 |
+
def get_embeddings(self, item: EmbeddingRequest, api_key: str = Depends(extract_api_key)):
    """Embed ``item.prompt`` with the requested sentence-transformer model.

    Args:
        item: Embedding request carrying the model name, the prompt text,
            and optional encoder keyword arguments.
        api_key: Extracted by the ``extract_api_key`` dependency.

    Returns:
        dict: ``{"embedding": [...]}`` — the embedding vector as a list.

    Raises:
        HTTPException: 400 for an unknown model name or an encoder
            ``ValueError``.
    """
    # BUG FIX: the original body referenced undefined names `request`
    # (the parameter is `item`) and `models` (the encoders live in
    # `self.embeddings`), so every call raised NameError -> HTTP 500.
    model_name = item.model
    if model_name not in self.embeddings:
        # Explicit 400 instead of letting a KeyError surface as a 500.
        raise HTTPException(
            status_code=400,
            detail=f"Unknown embedding model: {model_name!r}",
        )
    try:
        # `options` defaults to None; guard with `or {}` so the kwargs
        # pass-through (commented out in the original) is safe to enable.
        model_kwargs = item.options or {}
        embeddings = self.embeddings[model_name].encode(
            item.prompt, convert_to_tensor=True, **model_kwargs
        )
        return {"embedding": embeddings.tolist()}
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
|
158 |
+
|
159 |
+
|
160 |
def get_readme(self):
|
161 |
readme_path = Path(__file__).parents[1] / "README.md"
|
162 |
with open(readme_path, "r", encoding="utf-8") as rf:
|
|
|
184 |
summary="Chat completions in conversation session",
|
185 |
include_in_schema=include_in_schema,
|
186 |
)(self.chat_completions)
|
187 |
+
|
188 |
+
self.app.post(
|
189 |
+
"/api/embeddings",
|
190 |
+
summary="Get Embeddings with prompt",
|
191 |
+
include_in_schema=True,
|
192 |
+
)(self.get_embeddings)
|
193 |
self.app.get(
|
194 |
"/readme",
|
195 |
summary="README of HF LLM API",
|