Spaces:
Paused
Paused
Commit
·
27d0b7a
1
Parent(s):
c755479
Added fix to streaming response
Browse files

main/routes.py: +16 −4
main/routes.py
CHANGED
@@ -281,10 +281,22 @@ async def generate_stream(request: GenerateRequest):
     """Generate streaming text response from prompt"""
     logger.info(f"Received streaming generation request for prompt: {request.prompt[:50]}...")
     try:
-        [old line 284 — content not captured by the page extraction]
-        [old line 285 — content not captured by the page extraction]
-        [old line 286 — content not captured by the page extraction]
-        [old line 287 — content not captured by the page extraction]
+        async def event_generator():
+            async for chunk in api.generate_stream(
+                prompt=request.prompt,
+                system_message=request.system_message,
+                max_new_tokens=request.max_new_tokens or api.max_new_tokens
+            ):
+                yield f"data: {chunk}\n\n"
+            yield "data: [DONE]\n\n"
+
+        return StreamingResponse(
+            event_generator(),
+            media_type="text/event-stream",
+            headers={
+                "Cache-Control": "no-cache",
+                "Connection": "keep-alive",
+            }
+        )
         )
     except Exception as e:
         logger.error(f"Error in generate_stream endpoint: {str(e)}")

NOTE(review): the four removed lines (old 284–287) were collapsed to bare "-" markers in the extracted page; their original content is not recoverable from this capture — verify against the repository's commit 27d0b7a (parent c755479).