Update main.py
main.py (CHANGED)
@@ -18,6 +18,7 @@ from fastapi.security import APIKeyHeader
 from fastapi.responses import StreamingResponse, JSONResponse
 from pydantic import BaseModel, Field
 from openai import OpenAI
+from prompts import *
 # ============================================================================
 # Configuration and Setup
 # ============================================================================
@@ -315,7 +316,6 @@ class PresentationChatModel(BaseModel):
 class OutputFormatEnum(str, Enum):
     html = "html"
     pdf = "pdf"
-    pptx = "pptx"
 
 # Class model for presentation data
 class PresentationModel(BaseModel):
@@ -398,7 +398,7 @@ async def presentation_chat(query: PresentationChatModel, background_tasks: Back
     llm_query = LLMAgentQueryModel(
         prompt=query.prompt,
         conversation_id=query.conversation_id,
-        system_message=
+        system_message=PRESENTATION_SYSTEM_PROMPT,
         model_id=query.model_id,
         user_id=query.user_id
     )
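The new `from prompts import *` line assumes a `prompts` module next to main.py that defines PRESENTATION_SYSTEM_PROMPT; that module is not included in this commit. A minimal sketch of what it might contain, with illustrative wording only, assuming a plain string constant:

# prompts.py -- hypothetical sketch, not part of this commit.
# Assumed: a string constant used as the system message for the
# presentation chat endpoint; the wording below is illustrative only.
PRESENTATION_SYSTEM_PROMPT = (
    "You are a presentation assistant. Turn the user's request into "
    "structured slide content that can be rendered to HTML or PDF."
)

# Assumed: declaring __all__ keeps the wildcard import in main.py from
# pulling in anything beyond the intended prompt constants.
__all__ = ["PRESENTATION_SYSTEM_PROMPT"]

With such a module in place, the previously dangling `system_message=` keyword argument in `presentation_chat` now passes the shared system prompt to every `LLMAgentQueryModel` built by that endpoint.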