# chatapi/app.py
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from huggingface_hub import InferenceClient

app = FastAPI()

# Inference client for the hosted Mistral-7B-Instruct model
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")


class PromptRequest(BaseModel):
    """Request body: the new user prompt plus the prior conversation turns."""
    prompt: str
    history: list  # list of [user_prompt, bot_response] pairs
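    # Example request body (hypothetical values):
    #   {"prompt": "How are you?", "history": [["Hello", "Hi there!"]]}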


def format_prompt(message, history):
    """Build a Mistral-Instruct prompt from the system prompt, chat history, and new message."""
    system_prompt = "You are Mistral, a gentle and useful AI assistant."
    prompt = "<s>"
    prompt += f"[INST] {system_prompt} [/INST]"
    # Replay earlier turns in the [INST] ... [/INST] answer</s> format the model expects.
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    # Append the new user message as the final instruction.
    prompt += f"[INST] {message} [/INST]"
    return prompt
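
# Example (hypothetical inputs): format_prompt("How are you?", [["Hello", "Hi there!"]])
# yields roughly:
#   "<s>[INST] You are Mistral, a gentle and useful AI assistant. [/INST][INST] Hello [/INST] Hi there!</s> [INST] How are you? [/INST]"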


def generate(prompt, history):
    """Stream a completion from the model, yielding the accumulated text after each new token."""
    formatted_prompt = format_prompt(prompt, history)
    stream = client.text_generation(formatted_prompt, stream=True, details=True, return_full_text=False)
    output = ""
    for response in stream:
        output += response.token.text
        yield output


@app.post("/generate/")
async def generate_response(request: PromptRequest):
    try:
        # generate() is a plain synchronous generator, so consume it with a regular for loop.
        responses = []
        for response in generate(request.prompt, request.history):
            responses.append(response)
        # Each element is a progressively longer partial output; the last one is the full reply.
        return {"responses": responses}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
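
# A minimal way to exercise the endpoint, assuming the app is served locally with
# uvicorn (e.g. `uvicorn app:app --host 0.0.0.0 --port 8000`); the URL, port, and
# payload below are illustrative:
#
#   import requests
#   resp = requests.post(
#       "http://localhost:8000/generate/",
#       json={"prompt": "Hello!", "history": []},
#   )
#   print(resp.json()["responses"][-1])  # the last element holds the full generated text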