from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from starlette.responses import Response

from app.deps.users import current_user
from app.models.user import User
from app.openai.base import openai_manager
from app.openai.core import ask, filter
from app.vectorstore.qdrant import qdrant_manager


class QueryRequest(BaseModel):
    query: str
    document_id: int


class QueryResponse(BaseModel):
    answer: str
    document_id: int


router = APIRouter(prefix="/queries")


@router.post("/")
async def query(
    query_request: QueryRequest,
    response: Response,
    user: User = Depends(current_user),
) -> QueryResponse:
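    """Answer a question about one of the current user's documents.

    The query text is embedded, matching chunks for the given document are
    retrieved from Qdrant, and the concatenated chunks are passed to the LLM
    as context for generating the answer.
    """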
    # TODO: add a check that this user actually owns this document.
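    # A possible shape for that check (hypothetical: assumes an async DB session
    # dependency and a Document model with a user_id column, neither of which is
    # defined in this file):
    #   document = await session.get(Document, query_request.document_id)
    #   if document is None or document.user_id != user.id:
    #       raise HTTPException(status_code=404, detail="Document not found")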

    # Embed the query text so it can be compared against the stored chunk vectors.
    query_vector = openai_manager.get_embedding(query_request.query)

    # Retrieve the closest chunks for this user's document from Qdrant.
    points = qdrant_manager.search_point(
        query_vector=query_vector,
        user_id=str(user.id.hex),
        document_id=int(query_request.document_id),
        limit=1000,
    )

    # print(">>>>>>>>>>>>")
    # print("points")
    # print(points)

    context = "\n\n\n".join([point.payload["chunk"] for point in points])
    # print(">>>>>>>>>>>>")
    # print("context")
    # print(context)

    # filter_response = filter(context, query_request.query, openai_manager)
    # print(">>>>>>>>>>>>>>>>")
    # print("filter resopnse")
    # print(filter_response)
    # print("----------------")
    # remove later
    filter_response = True
    if filter_response:
        answer = ask(
            context,
            query_request.query,
            openai_manager,
        )

        query_response = QueryResponse(
            answer=answer, document_id=query_request.document_id
        )

    else:
        query_response = QueryResponse(
            answer="Sorry, Your question is out of Context!",
            document_id=query_request.document_id,
        )

    return query_response
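

# Example request, assuming the router is mounted at the application root and
# that current_user authenticates via a bearer token (both are assumptions not
# visible in this file):
#
#   curl -X POST http://localhost:8000/queries/ \
#       -H "Authorization: Bearer <token>" \
#       -H "Content-Type: application/json" \
#       -d '{"query": "What does section 2 cover?", "document_id": 1}'
#
# Expected response shape: {"answer": "...", "document_id": 1}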