Spaces:
Sleeping
Sleeping
kltn20133118
committed on
Upload 258 files
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +9 -0
- auth/__pycache__/authentication.cpython-310.pyc +0 -0
- auth/__pycache__/authentication.cpython-311.pyc +0 -0
- auth/authentication.py +133 -0
- controller/AuthenticationController.py +63 -0
- controller/ChatController.py +37 -0
- controller/DefaultController.py +75 -0
- controller/FileController.py +64 -0
- controller/MySQLController.py +94 -0
- controller/OTPController.py +37 -0
- controller/UserController.py +96 -0
- controller/__pycache__/AuthenticationController.cpython-310.pyc +0 -0
- controller/__pycache__/ChatController.cpython-310.pyc +0 -0
- controller/__pycache__/ChatController.cpython-311.pyc +0 -0
- controller/__pycache__/ChatController.py +35 -0
- controller/__pycache__/DefaultController.cpython-310.pyc +0 -0
- controller/__pycache__/DefaultController.cpython-311.pyc +0 -0
- controller/__pycache__/FileController.cpython-310.pyc +0 -0
- controller/__pycache__/FileController.cpython-311.pyc +0 -0
- controller/__pycache__/MySQLController.cpython-310.pyc +0 -0
- controller/__pycache__/MySQLController.cpython-311.pyc +0 -0
- controller/__pycache__/OTPController.cpython-310.pyc +0 -0
- controller/__pycache__/OTPController.cpython-311.pyc +0 -0
- controller/__pycache__/UserController.cpython-310.pyc +0 -0
- function/__pycache__/chatbot.cpython-310.pyc +0 -0
- function/__pycache__/chatbot.cpython-311.pyc +0 -0
- function/__pycache__/dropbox.cpython-310.pyc +0 -0
- function/__pycache__/dropbox.cpython-311.pyc +0 -0
- function/__pycache__/support_function.cpython-310.pyc +0 -0
- function/__pycache__/support_function.cpython-311.pyc +0 -0
- function/chatbot.py +726 -0
- function/dropbox.py +155 -0
- function/support_function.py +128 -0
- models/Database_Entity.py +68 -0
- models/__pycache__/Database_Entity.cpython-310.pyc +0 -0
- models/__pycache__/Database_Entity.cpython-311.pyc +0 -0
- models/__pycache__/__init__.cpython-310.pyc +0 -0
- repository/ChatHistoryRepository.py +276 -0
- repository/ConfigDatabase.py +48 -0
- repository/DetailChatRepository.py +141 -0
- repository/OTPRepository.py +82 -0
- repository/UserInfoRepository.py +139 -0
- repository/UserLoginRepository.py +135 -0
- repository/UserRepository.py +357 -0
- repository/__pycache__/ChatHistoryRepository.cpython-310.pyc +0 -0
- repository/__pycache__/ChatHistoryRepository.cpython-311.pyc +0 -0
- repository/__pycache__/ConfigDatabase.cpython-310.pyc +0 -0
- repository/__pycache__/ConfigDatabase.cpython-311.pyc +0 -0
- repository/__pycache__/DetailChatRepository.cpython-310.pyc +0 -0
- repository/__pycache__/DetailChatRepository.cpython-311.pyc +0 -0
.gitattributes
CHANGED
@@ -38,3 +38,12 @@ tests/test_service/user_file/quangphuc@gmail.com/demo1.pdf filter=lfs diff=lfs m
|
|
38 |
tests/user_file/quangphuc@gmail.com/demo1.pdf filter=lfs diff=lfs merge=lfs -text
|
39 |
vector_database/vonhuy5112002@gmail.com/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
40 |
vector_database/vonhuy777@gmail.com/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
38 |
tests/user_file/quangphuc@gmail.com/demo1.pdf filter=lfs diff=lfs merge=lfs -text
|
39 |
vector_database/vonhuy5112002@gmail.com/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
40 |
vector_database/vonhuy777@gmail.com/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
41 |
+
user_file/20133118@student.hcmute.edu.vn/DEMO2.pdf filter=lfs diff=lfs merge=lfs -text
|
42 |
+
user_file/vonhuy5112002@gmail.com/Nhom05_FinalProject3.docx filter=lfs diff=lfs merge=lfs -text
|
43 |
+
user_file/vonhuy5112002@gmail.com/Report-ChatGPT.docx filter=lfs diff=lfs merge=lfs -text
|
44 |
+
user_file/vonhuy777@gmail.com/DEMO2.pdf filter=lfs diff=lfs merge=lfs -text
|
45 |
+
vector_database/20133118@student.hcmute.edu.vn/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
46 |
+
vector_database/20133118@student.hcmute.edu.vn/DEMO1.docx/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
47 |
+
vector_database/20133118@student.hcmute.edu.vn/DEMO2.pdf/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
48 |
+
vector_database/vonhuy5112002@gmail.com/Nhom05_FinalProject3.docx/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
49 |
+
vector_database/vonhuy777@gmail.com/DEMO2.pdf/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
auth/__pycache__/authentication.cpython-310.pyc
ADDED
Binary file (4.64 kB). View file
|
|
auth/__pycache__/authentication.cpython-311.pyc
ADDED
Binary file (8.37 kB). View file
|
|
auth/authentication.py
ADDED
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# auth/authentication.py — JWT access/refresh token issuing and validation.
import base64
import logging
import secrets
import string
import time
from datetime import datetime, timedelta, timezone
from typing import Dict

import jwt
import pytz
from fastapi import Depends, HTTPException

from repository import UserRepository, UserLoginRepository

# NOTE(review): signing secrets are hard-coded; load them from environment
# variables or a secret manager before production use.
JWT_SECRET = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
JWT_ALGORITHM = "HS512"
SECRET_KEY = "8deadce9449770680910741063cd0a3fe0acb62a8978661f421bbcbb66dc41f1"


def check_token_is_valid(token):
    """Return True when *token* maps to a known user email in the repository."""
    return UserRepository.getEmailUserByAccessToken(token) is not None


def unique_string(byte: int = 8) -> str:
    """Return a URL-safe random string built from *byte* bytes of entropy."""
    return secrets.token_urlsafe(byte)


def token_response(token: str):
    """Wrap an access token in the standard response payload."""
    return {
        "access_token": token
    }


def str_encode(string: str) -> str:
    """Base85-encode an ASCII string (used to obfuscate the JWT subject)."""
    return base64.b85encode(string.encode('ascii')).decode('ascii')


def str_decode(string: str) -> str:
    """Inverse of :func:`str_encode`."""
    return base64.b85decode(string.encode('ascii')).decode('ascii')


def get_token_payload(token: str, secret: str, algo: str):
    """Decode *token* with *secret*; return the payload dict or None when invalid.

    PyJWT validates the 'exp' claim during decode, so an expired token also
    yields None here.
    """
    try:
        # PyJWT documents `algorithms` as a list of accepted algorithm names.
        payload = jwt.decode(token, secret, algorithms=[algo])
    except Exception as jwt_exec:
        logging.debug(f"JWT Error: {str(jwt_exec)}")
        payload = None
    return payload


def generate_token(payload: dict, secret: str, algo: str, expiry: timedelta):
    """Sign *payload* (mutated in place) with an 'exp' claim *expiry* from now.

    Uses an aware UTC datetime: PyJWT interprets naive datetimes as UTC, so the
    previous naive `datetime.now()` skewed the expiry by the server's UTC offset.
    """
    payload.update({"exp": datetime.now(timezone.utc) + expiry})
    return jwt.encode(payload, secret, algorithm=algo)


def generate_random_string(length=12):
    """Return a random alphanumeric session id.

    Uses `secrets.choice` rather than `random.choice`: session ids are
    security-sensitive tokens and need a CSPRNG.
    """
    characters = string.ascii_letters + string.digits
    return ''.join(secrets.choice(characters) for _ in range(length))


def signJWT(user_email: str) -> Dict[str, str]:
    """Issue a fresh access + refresh token pair for *user_email*.

    Persists both tokens plus an Asia/Ho_Chi_Minh-formatted expiry timestamp,
    then creates or refreshes the user's login session id.
    """
    rt_expires = timedelta(days=3)
    refresh_key = unique_string(100)
    access_key = unique_string(50)
    at_expires = timedelta(minutes=180)
    at_payload = {
        "sub": str_encode(str(user_email)),
        'a': access_key,
    }
    access_token = generate_token(at_payload, JWT_SECRET, JWT_ALGORITHM, at_expires)
    rt_payload = {"sub": str_encode(str(user_email)), "t": refresh_key, 'a': access_key}
    refresh_token = generate_token(rt_payload, SECRET_KEY, JWT_ALGORITHM, rt_expires)
    expires_in = at_expires.seconds
    vn_timezone = pytz.timezone('Asia/Ho_Chi_Minh')
    # datetime.now(pytz.utc) is the actual current UTC instant; the previous
    # `.replace(tzinfo=pytz.utc)` mislabeled server-local time as UTC.
    current_time = datetime.now(pytz.utc).astimezone(vn_timezone) + timedelta(seconds=expires_in)
    formatted_time = current_time.strftime('%Y-%m-%d %H:%M:%S ')
    existing_user = UserRepository.getUserByEmail(user_email)
    if existing_user is None:
        UserRepository.addUser(user_email, access_token, refresh_token, formatted_time)
    else:
        UserRepository.updateUserLogin(user_email, access_token, refresh_token, formatted_time)
    user_record = UserRepository.getUserByEmail(user_email)
    session_id = ""
    if user_record:
        session_id = generate_random_string()
        existing_userlogin = UserLoginRepository.getUserLogin(user_email)
        if existing_userlogin is None:
            UserLoginRepository.addUserLogin(user_email, session_id=session_id)
        else:
            UserLoginRepository.updateUserLogin(user_email, session_id)
    return {
        "access_token": access_token,
        "refresh_token": refresh_token,
        "expires_in": at_expires.seconds,
        "session_id": session_id
    }


def returnAccessToken(user_email: str, refresh_token: str) -> Dict[str, str]:
    """Issue a new access token while keeping the caller's *refresh_token*."""
    access_key = unique_string(50)
    at_expires = timedelta(minutes=180)
    at_payload = {
        "sub": str_encode(str(user_email)),
        'a': access_key,
    }
    access_token = generate_token(at_payload, JWT_SECRET, JWT_ALGORITHM, at_expires)
    user_record = UserRepository.getUserByEmail(user_email)
    session_id = ""
    if user_record:
        if user_record.email:
            session_id = generate_random_string()
            existing_userlogin = UserLoginRepository.getUserLogin(user_email)
            if existing_userlogin is None:
                UserLoginRepository.addUserLogin(user_email, session_id=session_id)
            else:
                UserLoginRepository.updateUserLogin(user_email, session_id)
    return {
        "access_token": access_token,
        "refresh_token": refresh_token,
        "expires_in": at_expires.seconds,
        "session_id": session_id
    }


def decodeJWT(token: str) -> dict:
    """Decode an access token: payload dict, None when expired, {} when invalid."""
    try:
        decoded_token = jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALGORITHM])
        return decoded_token if decoded_token["exp"] >= time.time() else None
    except Exception:  # narrowed from a bare except; still deliberately best-effort
        return {}


def get_refresh_token(refresh_token, email):
    """Exchange a refresh token for a new access token.

    Raises HTTPException(403) when the refresh token cannot be decoded
    (including PyJWT-rejected expired tokens).
    """
    token_payload = get_token_payload(refresh_token, SECRET_KEY, JWT_ALGORITHM)
    if not token_payload:
        raise HTTPException(status_code=403, detail="Invalid Request.")
    exp = token_payload.get('exp')
    if exp >= time.time():
        return returnAccessToken(email, refresh_token)
    # Defensive fallback: re-login with a brand-new token pair. The original
    # `elif not token_payload` branch was unreachable (guarded above) and the
    # expired case fell through, implicitly returning None.
    return signJWT(email)
|
controller/AuthenticationController.py
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# controller/AuthenticationController.py — login / signup / token-refresh routes.
from fastapi import APIRouter, Query
from request import RequestAuth
from response import ResponseAuth as res
from service import AuthService
from function import support_function
from fastapi import HTTPException

router = APIRouter()


def _missing(message):
    """Build the standard 400 validation-error response."""
    return res.ReponseError(status=400, data=res.Message(message=message))


@router.post('/login', tags=["Authentication"])
async def login(request: RequestAuth.RequestLoginEmail):
    """Authenticate with email + password."""
    email_check = support_function.check_value_email_controller(request.email)
    if email_check is not True:
        return email_check
    if request.password is None or not request.password.strip():
        return _missing("Password is required.")
    return await AuthService.login(request)


@router.post('/login_google', tags=["Authentication"])
async def login_google(request: RequestAuth.RequestLoginGoogle):
    """Authenticate with a Google OAuth2 token."""
    email_check = support_function.check_value_email_controller(request.email)
    if email_check is not True:
        return email_check
    google_token = request.token_google
    if google_token is None or not google_token.strip():
        return _missing("token_google oauth2 is required.")
    if google_token.isdigit():
        return _missing("token_google must be a string, not a number.")
    return await AuthService.login_google(request)


@router.post('/sign_up', tags=["Authentication"])
async def signup(request: RequestAuth.RequestRegister):
    """Register a new account; password and confirmation are both mandatory."""
    email_check = support_function.check_value_email_controller(request.email)
    if email_check is not True:
        return email_check
    if request.password is None or not request.password.strip():
        return _missing("Password is required.")
    if request.confirm_password is None or not request.confirm_password.strip():
        return _missing("Confirm Password is required.")
    return await AuthService.sign_up(request)


@router.post('/refresh_token', tags=["Authentication"])
async def refresh_token_account(request: RequestAuth.RequestRefreshTokenLogin):
    """Exchange a refresh token for a new access token."""
    refresh = request.refresh_token
    if refresh is None or not refresh.strip():
        return _missing("token is required.")
    if refresh.isdigit():
        return _missing("token must be string")
    return await AuthService.refresh_token(request)
|
controller/ChatController.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# controller/ChatController.py — chatbot query / extraction / question routes.
from fastapi import APIRouter, Form, Request
from service import ChatService
from request import RequestChat
from typing import Optional
from fastapi.requests import Request
from function import support_function
from response import ResponseChat as res
from fastapi import Path, Query

router = APIRouter()


@router.post("/chatbot/query", tags=["Chat"])
async def handle_query2_upgrade_old(request: Request,
                                    user_id: str = Form(None),
                                    text_all: str = Form(...),
                                    question: Optional[str] = Form(None),
                                    chat_name: Optional[str] = Form(None)):
    """Run a chatbot query over *text_all* for the given user."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    # Use a distinct name instead of rebinding `request`, which shadowed the
    # incoming fastapi.Request parameter.
    query_request = RequestChat.RequestQuery2UpgradeOld(user_id=user_id, text_all=text_all,
                                                        question=question, chat_name=chat_name)
    return await ChatService.query2_upgrade_old(query_request)


@router.get("/chatbot/extract_file/{user_id}", tags=["Chat"])
async def extract_file(user_id: str):
    """Extract content from the user's uploaded files."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    extract_request = RequestChat.RequestExtractFile(user_id=user_id)
    return await ChatService.extract_file(extract_request)


@router.get("/chatbot/generate_question/{user_id}", tags=["Chat"])
async def generate_question(user_id: str):
    """Generate suggested questions from the user's documents."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    question_request = RequestChat.RequestGenerateQuestion(user_id=user_id)
    return await ChatService.generate_question(question_request)
|
controller/DefaultController.py
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# controller/DefaultController.py — misc user/session/upload endpoints.
from fastapi import HTTPException, Depends, Query, APIRouter
from service import DefaultService
from request import RequestDefault
from request import RequestDefault as req
from function import support_function
from auth.authentication import decodeJWT
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi.responses import JSONResponse
from auth import authentication
from fastapi.requests import Request
from response import ResponseDefault as res
from fastapi import File, UploadFile, Form

router = APIRouter()


@router.get("/is_me", tags=["Default"])
async def is_me(token: str = Query(...)):
    """Resolve the current user from an access token passed as a query param."""
    # Bug fix: check None before calling .strip(); the original order
    # (`token.strip() == "" or token is None`) raised AttributeError on None.
    if token is None or token.strip() == "":
        return res.ReponseError(status=400,
                                data=res.Message(message="Token field is required."))
    if token.lower() == "none":
        return res.ReponseError(status=400,
                                data=res.Message(message="Token cannot be None."))
    if not isinstance(token, str):
        return res.ReponseError(status=400,
                                data=res.Message(message="Token must be a non-empty string."))
    try:
        # Reject purely numeric tokens (e.g. "123", "1.5").
        float(token)
        return res.ReponseError(status=400,
                                data=res.Message(message="Token must be a string, not a number."))
    except ValueError:
        pass
    request = RequestDefault.RequestIsMe(token=token)
    return await DefaultService.is_me(request)


@router.post('/create_firebase_user_google', tags=["Default"])
async def get_or_create_firebase_user(request: RequestDefault.RequestCreateFireBaseUserGoogle):
    """Get or create a Firebase user from a Google OAuth token."""
    check = support_function.check_value_email_controller(request.email)
    if check is not True:
        return check
    token_google = request.token_google
    if token_google is None or token_google == "":
        return res.ReponseError(status=400,
                                data=res.Message(message="Token field is required."))
    if not isinstance(token_google, str):
        return res.ReponseError(status=400,
                                data=res.Message(message="Token must be a non-empty string."))
    try:
        float(token_google)
        return res.ReponseError(status=400,
                                data=res.Message(message="Token must be a string, not a number."))
    except ValueError:
        pass
    return await DefaultService.create_firebase_user(request)


@router.get("/info_user/{user_id}", tags=["Default"])
async def get_user(user_id: str):
    """Return profile info for a user."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    request = RequestDefault.RequestInfoUser(user_id=user_id)
    return await DefaultService.info_user(request)


# Image types accepted by /upload_image.
ALLOWED_IMAGE_EXTENSIONS = {"jpeg", "jpg", "png"}


def allowed_file(filename: str) -> bool:
    """True when *filename* has an extension from ALLOWED_IMAGE_EXTENSIONS."""
    return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_IMAGE_EXTENSIONS


@router.post("/upload_image", tags=["Default"])
async def upload_image(user_id: str = Form(None), file: UploadFile = File(...)):
    """Upload a profile image for a user."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    request = req.RequestUpLoadImage(user_id=user_id, files=file)
    return await DefaultService.upload_image_service(request)
|
controller/FileController.py
ADDED
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# controller/FileController.py — file upload / download / delete routes.
from fastapi import APIRouter, Form, File, UploadFile, Query
from typing import List, Optional
from service import FileService
from function import support_function
from fastapi import HTTPException
from response import ResponseFile as res
from request import RequestFile

router = APIRouter()

# Document types the chatbot can ingest.
ALLOWED_EXTENSIONS = {'csv', 'txt', 'doc', 'docx', 'pdf', 'xlsx', 'pptx', 'json', 'html'}


def allowed_file(filename):
    """True when *filename* has an extension from ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS


@router.delete("/delete", tags=["File"])
async def delete_folder(request: RequestFile.RequestDeleteAllFile):
    """Delete every file belonging to a user."""
    user_check = support_function.check_value_user_id_controller(request.user_id)
    if user_check is not True:
        return user_check
    return await FileService.deleteAllFile(request)


@router.get("/list_name_files", tags=["File"])
async def get_name(user_id: str):
    """List the user's stored file names."""
    user_check = support_function.check_value_user_id_controller(user_id)
    if user_check is not True:
        return user_check
    list_request = RequestFile.RequestGetNameFile(user_id=user_id)
    return await FileService.listNameFiles(list_request)


@router.delete("/delete_file", tags=["File"])
async def delete_one_file(request: RequestFile.RequestDeleteFile):
    """Delete one named file of a user."""
    user_check = support_function.check_value_user_id_controller(request.user_id)
    if user_check is not True:
        return user_check
    if request.name_file is None or not request.name_file.strip():
        return res.ReponseError(status=400,
                                data=res.Message(message="Name file is required."))
    return await FileService.deleteFile(request)


@router.post("/chatbot/download_folder", tags=["File"])
async def download_folder_from_dropbox(request: RequestFile.RequestDownLoadFolder):
    """Download a user's whole Dropbox folder."""
    user_check = support_function.check_value_user_id_controller(request.user_id)
    if user_check is not True:
        return user_check
    return await FileService.download_folder(request)


@router.post("/chatbot/download_files", tags=["File"])
async def download_file_by_id(request: RequestFile.RequestDownLoadFile):
    """Download selected files of a user."""
    user_check = support_function.check_value_user_id_controller(request.user_id)
    if user_check is not True:
        return user_check
    return await FileService.download_file(request)


@router.post("/upload_files", tags=["File"])
async def upload_files_dropbox(user_id: str = Form(None), files: Optional[List[UploadFile]] = File(None)):
    """Upload one or more files for a user."""
    user_check = support_function.check_value_user_id_controller(user_id)
    if user_check is not True:
        return user_check
    upload_request = RequestFile.RequestUploadFile(files=files, user_id=user_id)
    return await FileService.upload_files(upload_request)
|
controller/MySQLController.py
ADDED
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# controller/MySQLController.py — chat-history REST endpoints backed by MySQLService.
from fastapi import Query, APIRouter
from service import MySQLService
from request import RequestMySQL
from response import ResponseMySQL as res
from typing import Optional
from request import RequestMySQL as req
from function import support_function
from fastapi import HTTPException

router = APIRouter()


def _validate_id(raw_id, required_message):
    """Shared path-id validation: required, integer-formed, > 0.

    Extracted from the duplicated checks in render_chat_1/load_chat.
    Returns (cleaned_id, None) on success or (None, error_response) on failure;
    cleaned_id keeps the string form (surrounding quotes stripped), which is
    what the request models received in the original code.
    """
    if raw_id is None or raw_id.strip() == "":
        return None, res.ReponseError(status=400,
                                      data=res.Message(message=required_message))
    cleaned = raw_id.strip("'").strip('"')
    try:
        value = int(cleaned)
    except ValueError:
        return None, res.ReponseError(status=400,
                                      data=res.Message(message="Value must be an integer"))
    if not support_function.is_positive_integer(value):
        return None, res.ReponseError(status=400,
                                      data=res.Message(message="Value must be greater than 0"))
    return cleaned, None


@router.get("/chat_history/{user_id}", tags=["MySQL"])
async def render_chat(user_id: str):
    """Return the chat-history list for a user."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    request = RequestMySQL.RequestRenderChatHistory(user_id=user_id)
    return await MySQLService.render_chat_history(request)


@router.get("/data_relevant/{detail_chat_id}", tags=["MySQL"])
async def render_chat_1(detail_chat_id: str):
    """Return the relevant-data records for one chat detail."""
    detail_chat_id, error = _validate_id(detail_chat_id, "Id field is required.")
    if error is not None:
        return error
    request = req.RequestGetChatDetails(id=detail_chat_id)
    return await MySQLService.get_detail_chat_by_chat_id(request)


@router.get("/detail_chat/{user_id}/{chat_id}", tags=["MySQL"])
async def load_chat(chat_id: str, user_id: str):
    """Load the detail records of one chat for a user."""
    check = support_function.check_value_user_id_controller(user_id)
    if check is not True:
        return check
    chat_id, error = _validate_id(chat_id, "Chat id field is required.")
    if error is not None:
        return error
    request = req.RequestLoadChatHistory(chat_id=chat_id, user_id=user_id)
    return await MySQLService.load_chat_history(request)


@router.put("/edit_chat", tags=["MySQL"])
async def edit_chat(request: RequestMySQL.RequestEditNameChat):
    """Rename a chat."""
    check = support_function.check_value_user_id_controller(request.user_id)
    if check is not True:
        return check
    return await MySQLService.edit_chat(request)


@router.delete("/chat_history/delete", tags=["MySQL"])
async def delete_chat(request: RequestMySQL.RequestDeleteChat):
    """Delete a whole chat-history entry."""
    check = support_function.check_value_user_id_controller(request.user_id)
    if check is not True:
        return check
    return await MySQLService.delete_chat(request)


@router.delete("/detail_chat/delete", tags=["MySQL"])
async def delete_chat_detail(request: RequestMySQL.RequestDeleteDetailChat):
    """Delete a single chat-detail record."""
    check = support_function.check_value_user_id_controller(request.user_id)
    if check is not True:
        return check
    return await MySQLService.delete_chat_detail_by_id(request)


@router.post("/chat_history/create", tags=["MySQL"])
async def create_chat_history(request: RequestMySQL.RequestCreateChatHistory):
    """Create a new chat-history entry."""
    check = support_function.check_value_user_id_controller(request.user_id)
    if check is not True:
        return check
    return await MySQLService.create_chat_history(request)


@router.delete("/chat_history/delete_last_chat_record", tags=["MySQL"])
async def delete_last_chat_record(request: RequestMySQL.RequestStopChat):
    """Drop the most recent chat record (used when a chat is stopped)."""
    check = support_function.check_value_user_id_controller(request.user_id)
    if check is not True:
        return check
    return await MySQLService.delete_last_chat_detail_by_chat_name(request)
|
controller/OTPController.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# controller/OTPController.py — OTP creation and verification routes.
from fastapi import APIRouter
from function import support_function
from request import RequestOTP
from service import OTPService
from fastapi import HTTPException
from pydantic.error_wrappers import ErrorWrapper
from pydantic import BaseModel
from response import ResponseOTP as res

router = APIRouter()


def _validate_verify_request(request):
    """Validate email then otp; return True or the first failing check result."""
    email_check = support_function.check_value_email_controller(request.email)
    if email_check is not True:
        return email_check
    otp_check = support_function.check_value_otp(request.otp)
    if otp_check is not True:
        return otp_check
    return True


@router.post('/create_otp', tags=["OTP"])
async def create_otp(request: RequestOTP.RequestCreateOTP):
    """Create and send an OTP for the given email."""
    email_check = support_function.check_value_email_controller(request.email)
    if email_check is not True:
        return email_check
    return await OTPService.createOTP(request)


@router.post('/verify_otp', tags=["OTP"])
async def verify_otp(request: RequestOTP.RequestVerifyOTP):
    """Verify an OTP (account confirmation flow)."""
    validation = _validate_verify_request(request)
    if validation is not True:
        return validation
    return await OTPService.verifyOTP(request)


@router.post('/verify_otp_reset_password', tags=["OTP"])
async def verify_otp_reset(request: RequestOTP.RequestVerifyOTP):
    """Verify an OTP (password-reset flow)."""
    validation = _validate_verify_request(request)
    if validation is not True:
        return validation
    return await OTPService.verifyOTPReset(request)
|
controller/UserController.py
ADDED
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import APIRouter, Query
|
2 |
+
from request import RequestUser
|
3 |
+
from response import ResponseUser as res
|
4 |
+
from service import UserService
|
5 |
+
from function import support_function
|
6 |
+
from fastapi import HTTPException
|
7 |
+
from response import ResponseUser as res
|
8 |
+
router = APIRouter()
|
9 |
+
|
10 |
+
@router.put("/update_user_info", tags=["User"])
|
11 |
+
async def update_user_info(request: RequestUser.RequestUpdateUserInfo):
|
12 |
+
user_id = request.user_id
|
13 |
+
check = support_function.check_value_user_id_controller(user_id)
|
14 |
+
if check is not True:
|
15 |
+
return check
|
16 |
+
uid = request.uid
|
17 |
+
email = request.email
|
18 |
+
display_name = request.display_name
|
19 |
+
photo_url = request.photo_url
|
20 |
+
if uid is None or uid.strip() == "":
|
21 |
+
return res.ReponseError(status=400,
|
22 |
+
data=res.Message(message="uid field is required."))
|
23 |
+
if email is None or email.strip() == "":
|
24 |
+
return res.ReponseError(status=400,
|
25 |
+
data=res.Message(message="email field is required."))
|
26 |
+
if display_name is None or display_name.strip() == "":
|
27 |
+
return res.ReponseError(status=400,
|
28 |
+
data=res.Message(message="display_name field is required."))
|
29 |
+
if photo_url is None or photo_url.strip() == "":
|
30 |
+
return res.ReponseError(status=400,
|
31 |
+
data=res.Message(message="photo_url field is required."))
|
32 |
+
return await UserService.update_user_info(request)
|
33 |
+
|
34 |
+
@router.get('/check_info_google', tags=["User"])
async def check_info_google(user_id: str = Query(None)):
    """Look up Google-account info for a validated user id."""
    validation = support_function.check_value_user_id_controller(user_id)
    if validation is not True:
        return validation
    payload = RequestUser.RequestCheckInfoGoogle(user_id=user_id)
    return await UserService.check_info_google(payload)
|
41 |
+
|
42 |
+
@router.get('/check_info_google_signup', tags=["User"])
async def check_info_google_signup(email: str = None):
    """Look up Google-account info by e-mail during sign-up."""
    validation = support_function.check_value_email_controller(email)
    if validation is not True:
        return validation
    payload = RequestUser.RequestCheckInfoGoogleEmail(email=email)
    return await UserService.check_info_google_email(payload)
|
49 |
+
|
50 |
+
@router.get('/check_state_login', tags=["User"])
async def check_state_login(user_id: str = Query(None), session_id_now: str = Query(None)):
    """Verify that the caller's session id matches the stored login state."""
    validation = support_function.check_value_user_id_controller(user_id)
    if validation is not True:
        return validation
    if session_id_now is None or not session_id_now.strip():
        return res.ReponseError(status=400,
                                data=res.Message(message="Session Id is required."))
    # Purely numeric session ids are rejected on purpose (same rule as before).
    try:
        int(session_id_now)
    except ValueError:
        pass
    else:
        return res.ReponseError(status=400,
                                data=res.Message(message="Session Id must be a string, not a number."))
    payload = RequestUser.RequestCheckStateLogin(user_id=user_id, session_id_now=session_id_now)
    return await UserService.check_state_login(payload)
|
66 |
+
|
67 |
+
|
68 |
+
@router.post('/reset_password', tags=["User"])
async def reset_password(request: RequestUser.RequestResetPassword):
    """Kick off a password reset after validating the e-mail address."""
    validation = support_function.check_value_email_controller(request.email)
    if validation is not True:
        return validation
    return await UserService.reset_password(request)
|
75 |
+
|
76 |
+
@router.put('/change_password', tags=["User"])
async def reset_password_firebase(request: RequestUser.RequestChangePassword):
    """Change an authenticated user's password after validating every field."""
    validation = support_function.check_value_user_id_controller(request.user_id)
    if validation is not True:
        return validation
    # The original if/elif chain checked the fields in exactly this order;
    # the first blank/missing one wins.
    ordered_checks = [
        (request.confirm_new_password, "Confirm New password field is required."),
        (request.new_password, "New password field is required."),
        (request.current_password, "Current password field is required."),
    ]
    for value, message in ordered_checks:
        if value is None or value.strip() == "":
            return res.ReponseError(status=400, data=res.Message(message=message))
    return await UserService.change_password(request)
|
95 |
+
|
96 |
+
|
controller/__pycache__/AuthenticationController.cpython-310.pyc
ADDED
Binary file (2.31 kB). View file
|
|
controller/__pycache__/ChatController.cpython-310.pyc
ADDED
Binary file (1.57 kB). View file
|
|
controller/__pycache__/ChatController.cpython-311.pyc
ADDED
Binary file (2.63 kB). View file
|
|
controller/__pycache__/ChatController.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import APIRouter, Form, Request
|
2 |
+
from service import ChatService
|
3 |
+
from request import RequestChat
|
4 |
+
from typing import Optional
|
5 |
+
from fastapi.requests import Request
|
6 |
+
from function import support_function
|
7 |
+
from response import ResponseChat as res
|
8 |
+
router = APIRouter()
|
9 |
+
|
10 |
+
@router.post("/chatbot/query/", tags=["Chat"])
|
11 |
+
async def handle_query2_upgrade_old(request: Request,
|
12 |
+
user_id: str = Form(None),
|
13 |
+
text_all: str = Form(...), question: Optional[str] = Form(None),
|
14 |
+
chat_name: Optional[str] = Form(None)):
|
15 |
+
check = support_function.check_value_user_id_controller(user_id)
|
16 |
+
if check is not True:
|
17 |
+
return check
|
18 |
+
request = RequestChat.RequestQuery2UpgradeOld(user_id=user_id, text_all=text_all, question=question, chat_name=chat_name)
|
19 |
+
return ChatService.query2_upgrade_old(request)
|
20 |
+
|
21 |
+
@router.get("/chatbot/extract_file/", tags=["Chat"])
|
22 |
+
async def extract_file(user_id: str):
|
23 |
+
check = support_function.check_value_user_id_controller(user_id)
|
24 |
+
if check is not True:
|
25 |
+
return check
|
26 |
+
request = RequestChat.RequestExtractFile(user_id=user_id)
|
27 |
+
return ChatService.extract_file(request)
|
28 |
+
|
29 |
+
@router.get("/chatbot/generate_question/",tags=["Chat"])
|
30 |
+
async def generate_question(user_id: str):
|
31 |
+
check = support_function.check_value_user_id_controller(user_id)
|
32 |
+
if check is not True:
|
33 |
+
return check
|
34 |
+
request = RequestChat.RequestGenerateQuestion(user_id=user_id)
|
35 |
+
return ChatService.generate_question(request)
|
controller/__pycache__/DefaultController.cpython-310.pyc
ADDED
Binary file (3.01 kB). View file
|
|
controller/__pycache__/DefaultController.cpython-311.pyc
ADDED
Binary file (4.91 kB). View file
|
|
controller/__pycache__/FileController.cpython-310.pyc
ADDED
Binary file (2.68 kB). View file
|
|
controller/__pycache__/FileController.cpython-311.pyc
ADDED
Binary file (4.8 kB). View file
|
|
controller/__pycache__/MySQLController.cpython-310.pyc
ADDED
Binary file (3.31 kB). View file
|
|
controller/__pycache__/MySQLController.cpython-311.pyc
ADDED
Binary file (5.35 kB). View file
|
|
controller/__pycache__/OTPController.cpython-310.pyc
ADDED
Binary file (1.35 kB). View file
|
|
controller/__pycache__/OTPController.cpython-311.pyc
ADDED
Binary file (2.4 kB). View file
|
|
controller/__pycache__/UserController.cpython-310.pyc
ADDED
Binary file (3.37 kB). View file
|
|
function/__pycache__/chatbot.cpython-310.pyc
ADDED
Binary file (23.7 kB). View file
|
|
function/__pycache__/chatbot.cpython-311.pyc
ADDED
Binary file (42.3 kB). View file
|
|
function/__pycache__/dropbox.cpython-310.pyc
ADDED
Binary file (6 kB). View file
|
|
function/__pycache__/dropbox.cpython-311.pyc
ADDED
Binary file (12.4 kB). View file
|
|
function/__pycache__/support_function.cpython-310.pyc
ADDED
Binary file (4.15 kB). View file
|
|
function/__pycache__/support_function.cpython-311.pyc
ADDED
Binary file (7.76 kB). View file
|
|
function/chatbot.py
ADDED
@@ -0,0 +1,726 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from langchain.text_splitter import CharacterTextSplitter
|
2 |
+
import json
|
3 |
+
import os
|
4 |
+
import random
|
5 |
+
import re
|
6 |
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
7 |
+
import google.generativeai as genai
|
8 |
+
import nltk
|
9 |
+
import pandas as pd
|
10 |
+
from groq import Groq
|
11 |
+
from langchain.chains.summarize import load_summarize_chain
|
12 |
+
from langchain.docstore.document import Document
|
13 |
+
from langchain.prompts import PromptTemplate
|
14 |
+
from langchain.retrievers import BM25Retriever, EnsembleRetriever
|
15 |
+
from langchain.retrievers.contextual_compression import ContextualCompressionRetriever
|
16 |
+
from langchain.text_splitter import CharacterTextSplitter
|
17 |
+
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
18 |
+
from langchain_cohere import CohereRerank
|
19 |
+
from langchain_community.document_loaders import Docx2txtLoader
|
20 |
+
from langchain_community.document_loaders import TextLoader
|
21 |
+
from langchain_community.document_loaders import UnstructuredCSVLoader
|
22 |
+
from langchain_community.document_loaders import UnstructuredExcelLoader
|
23 |
+
from langchain_community.document_loaders import UnstructuredHTMLLoader
|
24 |
+
from langchain_community.document_loaders import UnstructuredMarkdownLoader
|
25 |
+
from langchain_community.document_loaders import UnstructuredPDFLoader
|
26 |
+
from langchain_community.document_loaders import UnstructuredPowerPointLoader
|
27 |
+
from langchain_community.document_loaders import UnstructuredXMLLoader
|
28 |
+
from langchain_community.document_loaders.csv_loader import CSVLoader
|
29 |
+
from langchain_community.llms import Cohere
|
30 |
+
from langchain_community.vectorstores import Chroma
|
31 |
+
from langchain_core.output_parsers.openai_tools import PydanticToolsParser
|
32 |
+
from langchain_core.prompts import ChatPromptTemplate
|
33 |
+
from langchain_core.pydantic_v1 import BaseModel, Field
|
34 |
+
from langchain_core.runnables import RunnablePassthrough
|
35 |
+
from langchain_openai import ChatOpenAI
|
36 |
+
from typing import List
|
37 |
+
from nltk.corpus import stopwords
|
38 |
+
from nltk.tokenize import word_tokenize
|
39 |
+
nltk.download('punkt')
|
40 |
+
|
41 |
+
def process_json_file(file_path):
|
42 |
+
json_data = []
|
43 |
+
with open(file_path, 'r') as file:
|
44 |
+
for line in file:
|
45 |
+
try:
|
46 |
+
data = json.loads(line)
|
47 |
+
json_data.append(data)
|
48 |
+
except json.JSONDecodeError:
|
49 |
+
try:
|
50 |
+
data = json.loads(line[:-1])
|
51 |
+
json_data.append(data)
|
52 |
+
except json.JSONDecodeError as e:
|
53 |
+
print(f"Error decoding JSON: {e}")
|
54 |
+
return json_data
|
55 |
+
|
56 |
+
# --- API keys & shared model clients -----------------------------------------
from dotenv import load_dotenv
import os
load_dotenv()
# Keys are read from .env / process environment.
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
COHERE_API_KEY = os.getenv("COHERE_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
GOOGLE_API_KEY1= os.getenv("GOOGLE_API_KEY_1")
GOOGLE_API_KEY= os.getenv("GOOGLE_API_KEY")
# NOTE(review): if COHERE_API_KEY or OPENAI_API_KEY is unset, os.getenv returns
# None and these assignments raise TypeError at import time — confirm intended.
os.environ["COHERE_API_KEY"] = COHERE_API_KEY
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
# Groq client used by chat_llama3().
client = Groq(
    api_key= GROQ_API_KEY,
)
# Gemini generation uses the secondary key (GOOGLE_API_KEY_1).
genai.configure(api_key=GOOGLE_API_KEY1)
os.environ["GOOGLE_API_KEY"] = GOOGLE_API_KEY
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI
# Shared embedding model and chat LLM used throughout this module.
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", task_type="retrieval_document")
llm = ChatGoogleGenerativeAI(model='gemini-pro',
                             max_output_tokens=2048,
                             temperature=0.2,
                             convert_system_message_to_human=True)
|
77 |
+
def extract_multi_metadata_content(texts, tests):
    """Collect page contents of documents whose source filename mentions a query term.

    For each term in *tests*, every document whose lowercased `metadata['source']`
    contains any word of the term contributes its `page_content`. The first
    term's section carries a "Dữ liệu của ..." header; sections are newline-joined.
    """
    sections = []
    sources_lower = [doc.metadata['source'].lower() for doc in texts]
    for position, test in enumerate(tests):
        wanted = set(test.lower().split())
        matched = [doc.page_content
                   for source, doc in zip(sources_lower, texts)
                   if any(word in source for word in wanted)]
        body = ''.join(matched)
        sections.append(f"Dữ liệu của {test}:\n{body}" if position == 0 else body)
    return '\n'.join(sections)
|
91 |
+
import unicodedata
|
92 |
+
def text_preprocessing(text):
    """Lowercase *text*, strip emoji/pictographs, NFC-normalise, collapse whitespace."""
    lowered = text.lower()
    # Same character ranges as before, assembled once into a single class.
    emoji_ranges = (
        u"\U0001F600-\U0001F64F"  # emoticons
        u"\U0001F300-\U0001F5FF"  # symbols & pictographs
        u"\U0001F680-\U0001F6FF"  # transport & map symbols
        u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
        u"\U00002500-\U00002BEF"  # chinese char
        u"\U00002702-\U000027B0"
        u"\U000024C2-\U0001F251"
        u"\U0001f926-\U0001f937"
        u"\U00010000-\U0010ffff"
        u"\u2640-\u2642"
        u"\u2600-\u2B55"
        u"\u200d"
        u"\u23cf"
        u"\u23e9"
        u"\u231a"
        u"\ufe0f"  # dingbats
        u"\u3030"
    )
    emoji_pattern = re.compile("[" + emoji_ranges + "]+", flags=re.UNICODE)
    stripped = emoji_pattern.sub('', lowered)
    normalised = unicodedata.normalize('NFC', stripped)
    return ' '.join(normalised.split())
|
118 |
+
def find_matching_files_in_docs_12_id(text, id):
    """Scan ./user_file/{id} for files whose names are mentioned in *text*.

    Returns a tuple (matching_files, matching_files_old1):
      matching_files      -- set of actual on-disk file names that contain one
                             of the query tokens (case-insensitive substring).
      matching_files_old1 -- the original-case tokens that matched, ordered by
                             their position in the tokenised query.
    """
    folder_path = f"./user_file/{id}"
    search_terms = []
    search_terms_old = []
    matching_index = []
    # Tokenise keeping dotted names ("demo.pdf") as single tokens.
    search_origin = re.findall(r'\b\w+\.\w+\b|\b\w+\b', text)
    search_terms_origin = []
    for word in search_origin:
        if '.' in word:
            search_terms_origin.append(word)
        else:
            search_terms_origin.extend(re.findall(r'\b\w+\b', word))

    # Lowercased token list used for the on-disk comparison below.
    file_names_with_extension = re.findall(r'\b\w+\.\w+\b|\b\w+\b', text.lower())
    # Original-case dotted names only (e.g. "Report.pdf").
    file_names_with_extension_old = re.findall(r'\b(\w+\.\w+)\b', text)
    for file_name in search_terms_origin:
        if "." in file_name:
            search_terms_old.append(file_name)
    for file_name in file_names_with_extension_old:
        if "." in file_name:
            search_terms_old.append(file_name)
    for file_name in file_names_with_extension:
        search_terms.append(file_name)
    # Remove recognised file-name tokens from the query text, then collect the
    # leftover plain words into search_terms_old as well.
    clean_text_old = text
    clean_text = text.lower()
    for term in search_terms_old:
        clean_text_old = clean_text_old.replace(term, '')
    for term in search_terms:
        clean_text = clean_text.replace(term, '')
    words_old = re.findall(r'\b\w+\b', clean_text_old)
    search_terms_old.extend(words_old)
    matching_files = set()
    for root, dirs, files in os.walk(folder_path):
        for file in files:
            for term in search_terms:
                if term.lower() in file.lower():
                    # NOTE(review): list.index returns the FIRST occurrence, so
                    # duplicate tokens all map to the same position; the index
                    # is later applied to search_terms_origin, whose length may
                    # differ from search_terms — confirm this alignment.
                    term_position = search_terms.index(term)
                    matching_files.add(file)
                    matching_index.append(term_position)
                    break
    matching_files_old1 = []
    matching_index.sort()
    for x in matching_index:
        matching_files_old1.append(search_terms_origin[x])
    return matching_files, matching_files_old1
|
163 |
+
|
164 |
+
def convert_xlsx_to_csv(xlsx_file_path, csv_file_path):
    """Convert the first sheet of an Excel workbook to CSV (no index column)."""
    pd.read_excel(xlsx_file_path).to_csv(csv_file_path, index=False)
|
167 |
+
|
168 |
+
def save_list_CSV_id(file_list, id):
    """Concatenate the row contents of each listed CSV under ./user_file/{id}.

    .xlsx entries are first converted to .csv on disk. Each file's rows are
    prefixed with a "Dữ liệu file ..." header line.
    """
    collected = []
    for name in file_list:
        if name.endswith('.xlsx'):
            source_path = f"./user_file/{id}/{name}"
            convert_xlsx_to_csv(source_path, source_path.replace(".xlsx", ".csv"))
            name = name.replace(".xlsx", ".csv")
        rows = CSVLoader(f"./user_file/{id}/{name}").load()
        collected.append(f"Dữ liệu file {name}:\n")
        collected.extend(row.page_content + "\n" for row in rows)
    return "".join(collected)
|
182 |
+
|
183 |
+
def merge_files(file_set, file_list):
    """Match each bare name from *file_list* (extension stripped) to the first
    member of *file_set* that contains it as a substring."""
    merged_files = {}
    for raw_name in file_list:
        stem = raw_name.split('.')[0]
        match = next((candidate for candidate in file_set if stem in candidate), None)
        if match is not None:
            merged_files[stem] = match
    return merged_files
|
193 |
+
|
194 |
+
def replace_keys_with_values(original_dict, replacement_dict):
    """Return a copy of *original_dict* with keys renamed via *replacement_dict*;
    keys without a mapping are kept as-is."""
    return {replacement_dict.get(key, key): value
            for key, value in original_dict.items()}
|
203 |
+
|
204 |
+
def aws1_csv_id(new_dict_csv, id):
    """Gather CSV/XLSX contents for every file referenced in *new_dict_csv*.

    *new_dict_csv* maps a file name to the query fragment that mentioned it.
    Returns (concatenated file contents, concatenated query fragments,
    comma-joined file names).
    """
    query_all = ""
    keyword = []
    for key, value in new_dict_csv.items():
        # Debug print removed: it leaked user queries to stdout.
        query_all += value
        keyword.append(key)
    text = save_list_CSV_id(keyword, id)
    sources = ",".join(keyword)
    return text, query_all, sources
|
216 |
+
|
217 |
+
def chat_llama3(prompt_query):
    """Ask Groq's llama3-70b model to answer *prompt_query*.

    Returns the reply text, or False when the API call fails so that callers
    (see question_answer) can fall back to Gemini.
    """
    try:
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "system",
                    "content": "Bạn là một trợ lý trung thưc, trả lời dựa trên nội dung tài liệu được cung cấp. Chỉ trả lời liên quan đến câu hỏi một cách đầy đủ chính xác, không bỏ sót thông tin."
                },
                {
                    "role": "user",
                    "content": f"{prompt_query}",
                }
            ],
            model="llama3-70b-8192",
            temperature=0.0,
            max_tokens=9000,
            stop=None,
            stream=False,
        )
        return chat_completion.choices[0].message.content
    # Broad on purpose: any API failure (auth, rate limit, network) triggers
    # the Gemini fallback. The unused `as error` binding was removed.
    except Exception:
        return False
|
240 |
+
|
241 |
+
def chat_gemini(prompt):
    """Send *prompt* to Gemini 1.5 Pro in a fresh chat session and return the reply text."""
    generation_config = {
        "temperature": 0.0,
        "top_p": 0.0,
        "top_k": 0,
        "max_output_tokens": 8192,
    }
    # Every harm category gets the same medium-and-above block threshold.
    harm_categories = (
        "HARM_CATEGORY_HARASSMENT",
        "HARM_CATEGORY_HATE_SPEECH",
        "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "HARM_CATEGORY_DANGEROUS_CONTENT",
    )
    safety_settings = [
        {"category": category, "threshold": "BLOCK_MEDIUM_AND_ABOVE"}
        for category in harm_categories
    ]
    model = genai.GenerativeModel(model_name="gemini-1.5-pro-latest",
                                  generation_config=generation_config,
                                  safety_settings=safety_settings)
    conversation = model.start_chat(history=[])
    conversation.send_message(prompt)
    return conversation.last.text
|
272 |
+
|
273 |
+
def question_answer(question):
    """Answer via Llama3 (Groq); fall back to Gemini when the Groq call fails
    (chat_llama3 returns False on any API error)."""
    primary = chat_llama3(question)
    return primary if primary else chat_gemini(question)
|
280 |
+
|
281 |
+
def check_persist_directory(id, file_name):
    """Return True when a Chroma persist directory already exists for this user/file pair."""
    return os.path.exists(f"./vector_database/{id}/{file_name}")
|
284 |
+
|
285 |
+
from langchain_community.vectorstores import FAISS
|
286 |
+
|
287 |
+
def check_path_exists(path):
    """Return True if *path* exists on disk (file or directory)."""
    return os.path.exists(path)
|
289 |
+
def aws1_all_id(new_dict, text_alls, id, thread_id):
    """Answer each per-file query in *new_dict* with a retrieve-rerank-answer pipeline.

    new_dict   -- maps a key to the query fragment for one file.
    text_alls  -- pre-split Documents for all of the user's files.
    id         -- user id (used for ./vector_database/... and ./user_file/... paths).
    thread_id  -- unused in this body — TODO confirm it can be dropped.

    Returns (concatenated answers, last relevant passage, last matched file name).
    """
    answer = ""
    # NOTE(review): this swaps the process-global Cohere key to COHERE_API_KEY_1
    # for the rerank step — affects every thread in the process.
    COHERE_API_KEY1 = os.getenv("COHERE_API_KEY_1")
    os.environ["COHERE_API_KEY"] = COHERE_API_KEY1
    answer_relevant = ""
    directory = ""
    for key, value in new_dict.items():
        query = value
        query = text_preprocessing(query)
        # keyword: on-disk file names mentioned in the query; keyword2 unused here.
        keyword, keyword2 = find_matching_files_in_docs_12_id(query, id)
        data = extract_multi_metadata_content(text_alls, keyword)
        if keyword:
            # Only the first matched file names the cache directories below.
            file_name = next(iter(keyword))
            text_splitter = CharacterTextSplitter(chunk_size=3200, chunk_overlap=1500)
            texts_data = text_splitter.split_text(data)

            # Reuse the per-user/per-file Chroma store when it already exists.
            if check_persist_directory(id, file_name):
                vectordb_query = Chroma(persist_directory=f"./vector_database/{id}/{file_name}", embedding_function=embeddings)
            else:
                vectordb_query = Chroma.from_texts(texts_data,
                                                   embedding=embeddings,
                                                   persist_directory=f"./vector_database/{id}/{file_name}")

            # Hybrid retrieval: dense (Chroma) + sparse (BM25), 0.4/0.6 weighted.
            k_1 = len(texts_data)
            retriever = vectordb_query.as_retriever(search_kwargs={f"k": k_1})
            bm25_retriever = BM25Retriever.from_texts(texts_data)
            bm25_retriever.k = k_1
            ensemble_retriever = EnsembleRetriever(retrievers=[bm25_retriever, retriever],
                                                   weights=[0.6, 0.4])
            docs = ensemble_retriever.get_relevant_documents(f"{query}")

            # FAISS index cache; load_local requires trusting the pickle on disk.
            path = f"./vector_database/FAISS/{id}/{file_name}"
            if check_path_exists(path):
                docsearch = FAISS.load_local(path, embeddings, allow_dangerous_deserialization=True)
            else:
                docsearch = FAISS.from_documents(docs, embeddings)
                docsearch.save_local(f"./vector_database/FAISS/{id}/{file_name}")
                docsearch = FAISS.load_local(path, embeddings, allow_dangerous_deserialization=True)

            # Cohere rerank keeps the top 3 passages.
            k_2 = len(docs)
            compressor = CohereRerank(top_n=3)
            retrieve3 = docsearch.as_retriever(search_kwargs={f"k": k_2})
            compression_retriever = ContextualCompressionRetriever(
                base_compressor=compressor, base_retriever=retrieve3
            )
            compressed_docs = compression_retriever.get_relevant_documents(f"{query}")

            if compressed_docs:
                # answer_relevant/directory keep only the LAST iteration's values.
                data = compressed_docs[0].page_content
                text = ''.join(map(lambda x: x.page_content, compressed_docs))
                prompt_document = f"Dựa vào nội dung sau:{text}. Hãy trả lời câu hỏi sau đây: {query}. Mà không thay đổi nội dung mà mình đã cung cấp. Cuối cùng nếu câu hỏi sử dụng tiếng Việt thì phải trả lời bằng Vietnamese. Nếu câu hỏi sử dụng tiếng Anh phải trả lời bằng English"
                answer_for = question_answer(prompt_document)
                answer += answer_for + "\n"
                answer_relevant = data
                directory = file_name

    return answer, answer_relevant, directory
|
346 |
+
|
347 |
+
|
348 |
+
def extract_content_between_keywords(query, keywords):
    """Split *query* into per-keyword fragments.

    For every keyword present in the query, the result maps the keyword to
    "<text before it> <keyword> <text after it, up to the next found keyword>".
    Keywords absent from the query are skipped.

    Fixes an off-by-one in the original: when none of the later keywords
    occurred, the slice bound was left at -1 (dropping the final character)
    instead of the end of the string.
    """
    contents = {}
    num_keywords = len(keywords)
    keyword_positions = []
    for i in range(num_keywords):
        keyword = keywords[i]
        keyword_position = query.find(keyword)
        keyword_positions.append(keyword_position)
        if keyword_position == -1:
            continue
        # Stop at the first later keyword that actually occurs; otherwise
        # run to the end of the query.
        next_keyword_position = len(query)
        for j in range(i + 1, num_keywords):
            candidate = query.find(keywords[j])
            if candidate != -1:
                next_keyword_position = candidate
                break
        if i == 0:
            content_before = query[:keyword_position].strip()
        else:
            content_before = query[keyword_positions[i - 1] + len(keywords[i - 1]):keyword_position].strip()
        if i == num_keywords - 1:
            content_after = query[keyword_position + len(keyword):].strip()
        else:
            content_after = query[keyword_position + len(keyword):next_keyword_position].strip()
        contents[keyword] = f"{content_before} {keyword} {content_after}"
    return contents
|
375 |
+
|
376 |
+
def generate_random_questions(filtered_ques_list):
    """Pick two questions at random; return the whole list when it has fewer than two."""
    if len(filtered_ques_list) < 2:
        return filtered_ques_list
    return random.sample(filtered_ques_list, 2)
|
382 |
+
|
383 |
+
def generate_question_main(loader, name_file):
    """Generate up to two random practice questions for one loaded document.

    loader    -- list of LangChain Documents (already .load()-ed by the caller).
    name_file -- the file's name; prefixed onto every generated question.
    """
    # First split pass: only the FIRST chunk becomes question material.
    # NOTE(review): texts[0] raises IndexError for an empty document — confirm
    # callers never pass one.
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=4500, chunk_overlap=2500)
    texts = text_splitter.split_documents(loader)
    question_gen = f"nội dung {name_file} : \n"
    question_gen += texts[0].page_content
    # Second split pass feeds the refine-style summarize chain below.
    splitter_ques_gen = RecursiveCharacterTextSplitter(
        chunk_size=4500,
        chunk_overlap=2200
    )
    chunks_ques_gen = splitter_ques_gen.split_text(question_gen)
    document_ques_gen = [Document(page_content=t) for t in chunks_ques_gen]
    llm_ques_gen_pipeline = llm
    # Vietnamese prompt: create at most 5 questions, matching the passage's language.
    prompt_template_vn = """
    Bạn là một chuyên gia tạo câu hỏi dựa trên tài liệu và tài liệu hướng dẫn.
    Bạn làm điều này bằng cách đặt các câu hỏi về đoạn văn bản dưới đây:

    ------------
    {text}
    ------------

    Hãy tạo ra các câu hỏi từ đoạn văn bản này.Nếu đoạn văn là tiếng Việt hãy tạo câu hỏi tiếng Việt. Nếu đoạn văn là tiếng Anh hãy tạo câu hỏi tiếng Anh.
    Hãy chắc chắn không bỏ sót bất kỳ thông tin quan trọng nào. Và chỉ tạo với đoạn tài liệu đó tối đa 5 câu hỏi liên quan tới tài liệu cung cấp nhất.Nếu trong đoạn tài liệu có các tên liên quan đến file như demo1.pdf( nhiều file khác) thì phải kèm nó vào nội dung câu hỏi bạn tạo ra.

    CÁC CÂU HỎI:
    """

    PROMPT_QUESTIONS_VN = PromptTemplate(template=prompt_template_vn, input_variables=["text"])
    # Refine prompt: improve/extend existing questions with additional context.
    refine_template_vn = ("""
    Bạn là một chuyên gia tạo câu hỏi thực hành dựa trên tài liệu và tài liệu hướng dẫn.
    Mục tiêu của bạn là giúp người học chuẩn bị cho một kỳ thi.
    Chúng tôi đã nhận được một số câu hỏi thực hành ở mức độ nào đó: {existing_answer}.
    Chúng tôi có thể tinh chỉnh các câu hỏi hiện có hoặc thêm câu hỏi mới
    (chỉ khi cần thiết) với một số ngữ cảnh bổ sung dưới đây.
    ------------
    {text}
    ------------

    Dựa trên ngữ cảnh mới, hãy tinh chỉnh các câu hỏi bằng tiếng Việt nếu đoạn văn đó cung cấp tiếng Việt. Nếu không hãy tinh chỉnh câu hỏi bằng tiếng Anh nếu đoạn đó cung cấp tiếng Anh.
    Nếu ngữ cảnh không hữu ích, vui lòng cung cấp các câu hỏi gốc. Và chỉ tạo với đoạn tài liệu đó tối đa 5 câu hỏi liên quan tới tài liệu cung cấp nhất. Nếu trong đoạn tài liệu có các tên file thì phải kèm nó vào câu hỏi.
    CÁC CÂU HỎI:
    """
    )

    REFINE_PROMPT_QUESTIONS = PromptTemplate(
        input_variables=["existing_answer", "text"],
        template=refine_template_vn,
    )
    ques_gen_chain = load_summarize_chain(llm=llm_ques_gen_pipeline,
                                          chain_type="refine",
                                          verbose=True,
                                          question_prompt=PROMPT_QUESTIONS_VN,
                                          refine_prompt=REFINE_PROMPT_QUESTIONS)
    ques = ques_gen_chain.run(document_ques_gen)
    ques_list = ques.split("\n")
    # Keep only lines that look like questions/sentences; strip leading "1. "
    # numbering and prefix the file name.
    filtered_ques_list = ["{}: {}".format(name_file, re.sub(r'^\d+\.\s*', '', element)) for element in ques_list if
                          element.endswith('?') or element.endswith('.')]
    return generate_random_questions(filtered_ques_list)
|
440 |
+
|
441 |
+
def load_file(loader):
    """Run a LangChain loader and return its Documents (thread-pool task helper)."""
    return loader.load()
|
443 |
+
|
444 |
+
def extract_data2(id):
    """Load every supported document under ./user_file/{id}, split into chunks,
    and persist them into the per-user Chroma store.

    Returns the list of chunked Documents, or False when the directory is
    missing or contains no files.
    """
    directory_path = f"./user_file/{id}"
    if not os.path.exists(directory_path) or not any(
            os.path.isfile(os.path.join(directory_path, f)) for f in os.listdir(directory_path)):
        return False
    # Suffix -> loader factory. Replaces the long if/elif chain; .json now uses
    # utf8 like .txt (was previously the platform default, inconsistent with
    # generate_question).
    loader_factories = {
        ".pdf": UnstructuredPDFLoader,
        ".docx": Docx2txtLoader,
        ".doc": Docx2txtLoader,
        ".txt": lambda p: TextLoader(p, encoding="utf8"),
        ".pptx": UnstructuredPowerPointLoader,
        ".csv": UnstructuredCSVLoader,
        ".xlsx": UnstructuredExcelLoader,
        ".json": lambda p: TextLoader(p, encoding="utf8"),
        ".md": UnstructuredMarkdownLoader,
    }
    documents = []
    tasks = []
    with ThreadPoolExecutor() as executor:
        for file in os.listdir(directory_path):
            factory = loader_factories.get(os.path.splitext(file)[1].lower())
            if factory is None:
                continue  # unsupported extension
            loader = factory(os.path.join(directory_path, file))
            tasks.append(executor.submit(load_file, loader))
        for future in as_completed(tasks):
            documents.extend(future.result())
    text_splitter = CharacterTextSplitter(chunk_size=4500, chunk_overlap=2500)
    texts = text_splitter.split_documents(documents)
    Chroma.from_documents(documents=texts,
                          embedding=embeddings,
                          persist_directory=f"./vector_database/{id}")
    return texts
|
495 |
+
|
496 |
+
def generate_question(id):
    """Generate questions from every supported document in ./user_file/<id>.

    Returns False when the folder is missing or contains no files; otherwise
    returns a flat list of questions produced by generate_question_main for
    each supported document. csv/xlsx files are intentionally not handled here.
    """
    directory_path = f"./user_file/{id}"
    if not os.path.exists(directory_path) or not any(
            os.path.isfile(os.path.join(directory_path, f)) for f in os.listdir(directory_path)):
        return False

    # Map extensions to loader factories — replaces the original repetitive
    # if/elif chain with a single dispatch table.
    loader_factories = {
        '.pdf': UnstructuredPDFLoader,
        '.docx': Docx2txtLoader,
        '.doc': Docx2txtLoader,
        '.txt': lambda p: TextLoader(p, encoding="utf8"),
        '.pptx': UnstructuredPowerPointLoader,
        '.json': lambda p: TextLoader(p, encoding="utf8"),
        '.md': UnstructuredMarkdownLoader,
    }

    def _load_and_generate(factory, path, file_name):
        # Load inside the worker thread. The original called .load() in the
        # main thread before submitting, which serialised the slow document
        # parsing step and defeated the thread pool.
        return generate_question_main(factory(path).load(), file_name)

    all_questions = []
    tasks = []
    with ThreadPoolExecutor() as executor:
        for file in os.listdir(directory_path):
            ext = os.path.splitext(file)[1]
            factory = loader_factories.get(ext)
            if factory is None:
                continue  # unsupported type (csv/xlsx deliberately skipped)
            tasks.append(executor.submit(
                _load_and_generate, factory, os.path.join(directory_path, file), file))
        for future in as_completed(tasks):
            all_questions.extend(future.result())
    return all_questions
|
533 |
+
|
534 |
+
class Search(BaseModel):
    """Structured-output schema for query decomposition.

    Filled by the LLM in query_analyzer: each element of `queries` is one
    standalone search query extracted from the user's (possibly compound)
    question.
    """
    queries: List[str] = Field(
        ...,
        description="Truy vấn riêng biệt để tìm kiếm, giữ nguyên ý chính câu hỏi riêng biệt",
    )
|
539 |
+
|
540 |
+
def query_analyzer(query):
    """Decompose a user question into separate search queries.

    Runs GPT-3.5 with structured output bound to the Search schema and
    returns the resulting Search instance (its .queries field holds one
    query per distinct information need).
    """
    # Removed dead code: the original constructed a PydanticToolsParser
    # here but never used it.
    system = """Bạn có khả năng đưa ra các truy vấn tìm kiếm chính xác để lấy thông tin giúp trả lời các yêu cầu của người dùng. Các truy vấn của bạn phải chính xác, không được bỏ ngắn rút gọn.
Nếu bạn cần tra cứu hai hoặc nhiều thông tin riêng biệt, bạn có thể làm điều đó!. Trả lời câu hỏi bằng tiếng Việt(Vietnamese), không được dùng ngôn ngữ khác"""
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system),
            ("human", "{question}"),
        ]
    )
    llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0.0)
    structured_llm = llm.with_structured_output(Search)
    chain = {"question": RunnablePassthrough()} | prompt | structured_llm
    return chain.invoke(query)
|
555 |
+
|
556 |
+
def handle_query(question, text_all, compression_retriever, id, thread_id):
    """Answer one sub-query against the user's uploaded documents.

    Three branches:
      1. the query names specific files -> answer from those files;
      2. no file named and top rerank relevance <= 0.12 -> assume the answer
         lives in a spreadsheet and answer from csv/xlsx content;
      3. otherwise -> build retrievers over the single best-matching file
         and answer from its reranked chunks.

    NOTE(review): branches 1 and 3 return (answer, relevant_text, source)
    but branch 2 returns only a bare string — callers unpacking three values
    will fail on that path; confirm intended.
    """
    # Switch the Cohere key used by downstream rerankers for this worker.
    COHERE_API_KEY_3 = os.environ["COHERE_API_KEY_3"]
    os.environ["COHERE_API_KEY"] = COHERE_API_KEY_3
    query = question
    x = query  # keep the untouched question for the final rerank call below
    # Files whose names appear in the query, plus the matched keywords.
    keyword, key_words_old = find_matching_files_in_docs_12_id(query, id)
    # if keyword == set() or key_words_old == list():
    #     return "Not found file"
    file_list = keyword

    if file_list:
        # --- Branch 1: the query names specific files ---
        list_keywords2 = list(key_words_old)
        contents1 = extract_content_between_keywords(query, list_keywords2)
        merged_result = merge_files(keyword, list_keywords2)
        original_dict = contents1
        replacement_dict = merged_result
        new_dict = replace_keys_with_values(original_dict, replacement_dict)
        # Spreadsheet-like files are split out and answered separately.
        files_to_remove = [filename for filename in new_dict.keys() if
                           filename.endswith('.xlsx') or filename.endswith('.csv')]
        removed_files = {}
        for filename in files_to_remove:
            removed_files[filename] = new_dict[filename]
        for filename in files_to_remove:
            new_dict.pop(filename)
        test_csv = ""
        text_csv, query_csv, source = aws1_csv_id(removed_files, id)
        prompt_csv = ""
        answer_csv = ""
        # NOTE(review): test_csv is always "" so this branch never executes;
        # the condition was probably meant to be `if text_csv:` — confirm
        # before changing, as fixing it alters the produced answer.
        if test_csv:
            prompt_csv = f"Dựa vào nội dung sau: {text_csv}. Hãy trả lời câu hỏi sau đây: {query_csv}.Bằng tiếng Việt"
            answer_csv = question_answer(prompt_csv)
        answer_document, data_relevant, source = aws1_all_id(new_dict, text_all, id, thread_id)
        answer_all1 = answer_document + answer_csv
        return answer_all1, data_relevant, source
    else:
        # --- No file named: rerank all chunks for this query ---
        compressed_docs = compression_retriever.get_relevant_documents(f"{query}")
        relevance_score_float = float(compressed_docs[0].metadata['relevance_score'])
        print(relevance_score_float)
        if relevance_score_float <= 0.12:
            # --- Branch 2: low text relevance; search the spreadsheets ---
            documents1 = []
            for file in os.listdir(f"./user_file/{id}"):
                if file.endswith('.csv'):
                    csv_path = f"./user_file/{id}/" + file
                    loader = UnstructuredCSVLoader(csv_path)
                    documents1.extend(loader.load())
                elif file.endswith('.xlsx'):
                    excel_path = f"./user_file/{id}/" + file
                    loader = UnstructuredExcelLoader(excel_path)
                    documents1.extend(loader.load())
            text_splitter_csv = CharacterTextSplitter.from_tiktoken_encoder(chunk_size=2200, chunk_overlap=1500)
            texts_csv = text_splitter_csv.split_documents(documents1)
            vectordb_csv = Chroma.from_documents(documents=texts_csv,
                                                 embedding=embeddings, persist_directory=f'./vector_database/csv/{thread_id}')
            k = len(texts_csv)
            retriever_csv = vectordb_csv.as_retriever(search_kwargs={"k": k})
            llm = Cohere(temperature=0)  # NOTE(review): unused — dead code?
            compressor_csv = CohereRerank(top_n=3, model="rerank-english-v2.0")
            compression_retriever_csv = ContextualCompressionRetriever(
                base_compressor=compressor_csv, base_retriever=retriever_csv
            )
            compressed_docs_csv = compression_retriever_csv.get_relevant_documents(f"{query}")
            # Answer from the single best-ranked spreadsheet only.
            file_path = compressed_docs_csv[0].metadata['source']
            print(file_path)
            if file_path.endswith('.xlsx'):
                new = file_path.replace(".xlsx", ".csv")
                convert_xlsx_to_csv(file_path, new)
                loader1 = CSVLoader(new)
            else:
                loader1 = CSVLoader(file_path)
            docs1 = loader1.load()
            text = " "
            for z in docs1:
                text += z.page_content + "\n"
            prompt_csv = f"Dựa vào nội dung sau: {text}. Hãy trả lời câu hỏi sau đây: {query}. Bằng tiếng Việt"
            answer_csv = question_answer(prompt_csv)
            return answer_csv
        else:
            # --- Branch 3: answer from the best-matching document file ---
            file_path = compressed_docs[0].metadata['source']
            file_path = file_path.replace('\\', '/')
            print(file_path)
            # NOTE(review): if no extension below matches, `loader` is
            # unbound and loader.load() raises NameError. Also note the
            # 'doc' check is missing its leading dot ('.doc').
            if file_path.endswith(".pdf"):
                loader = UnstructuredPDFLoader(file_path)
            elif file_path.endswith('.docx') or file_path.endswith('doc'):
                loader = Docx2txtLoader(file_path)
            elif file_path.endswith('.txt'):
                loader = TextLoader(file_path, encoding="utf8")
            elif file_path.endswith('.pptx'):
                loader = UnstructuredPowerPointLoader(file_path)
            elif file_path.endswith('.xml'):
                loader = UnstructuredXMLLoader(file_path)
            elif file_path.endswith('.html'):
                loader = UnstructuredHTMLLoader(file_path)
            elif file_path.endswith('.json'):
                loader = TextLoader(file_path)
            elif file_path.endswith('.md'):
                loader = UnstructuredMarkdownLoader(file_path)
            elif file_path.endswith('.xlsx'):
                file_path_new = file_path.replace(".xlsx", ".csv")
                convert_xlsx_to_csv(file_path, file_path_new)
                loader = CSVLoader(file_path_new)
            elif file_path.endswith('.csv'):
                loader = CSVLoader(file_path)
            text_splitter = CharacterTextSplitter(chunk_size=3200, chunk_overlap=1500)
            texts = text_splitter.split_documents(loader.load())
            k_1 = len(texts)
            file_name = os.path.basename(file_path)
            # Reuse the per-file Chroma index when it already exists on disk.
            if check_persist_directory(id, file_name):
                vectordb_file = Chroma(persist_directory=f"./vector_database/{id}/{file_name}",
                                       embedding_function=embeddings)
            else:
                vectordb_file = Chroma.from_documents(texts,
                                                      embedding=embeddings,
                                                      persist_directory=f"./vector_database/{id}/{file_name}")
            retriever_file = vectordb_file.as_retriever(search_kwargs={f"k": k_1})
            # Hybrid retrieval: BM25 keyword match + dense vectors.
            bm25_retriever = BM25Retriever.from_documents(texts)
            bm25_retriever.k = k_1
            ensemble_retriever = EnsembleRetriever(retrievers=[bm25_retriever, retriever_file],
                                                   weights=[0.6, 0.4])
            docs = ensemble_retriever.get_relevant_documents(f"{query}")

            # Second-stage FAISS index over the hybrid results, cached on disk.
            path = f"./vector_database/FAISS/{id}/{file_name}"
            if check_path_exists(path):
                docsearch = FAISS.load_local(path, embeddings, allow_dangerous_deserialization=True)
            else:
                docsearch = FAISS.from_documents(docs, embeddings)
                docsearch.save_local(f"./vector_database/FAISS/{id}/{file_name}")
                docsearch = FAISS.load_local(path, embeddings, allow_dangerous_deserialization=True)
            k_2 = len(docs)
            retrieve3 = docsearch.as_retriever(search_kwargs={f"k": k_2})
            compressor_file = CohereRerank(top_n=3, model="rerank-english-v2.0")
            compression_retriever_file = ContextualCompressionRetriever(
                base_compressor=compressor_file, base_retriever=retrieve3
            )
            compressed_docs_file = compression_retriever_file.get_relevant_documents(f"{x}")
            query = question
            text = ''.join(map(lambda x: x.page_content, compressed_docs_file))
            prompt = f"Dựa vào nội dung sau:{text}. Hãy trả lời câu hỏi sau đây: {query}. Mà không thay đổi, chỉnh sửa nội dung mà mình đã cung cấp"
            answer = question_answer(prompt)
            list_relevant = compressed_docs_file[0].page_content
            source = file_name
            return answer, list_relevant, source
|
697 |
+
import concurrent.futures
def handle_query_upgrade_keyword_old(query_all, text_all, id, chat_history):
    """Decompose *query_all* into sub-queries, answer each in parallel with
    handle_query, then synthesise one final answer using the chat history.

    Returns (final_answer, list_of_relevant_chunks, list_of_source_files).
    """
    COHERE_API_KEY_2 = os.environ["COHERE_API_KEY_2"]
    os.environ["COHERE_API_KEY"] = COHERE_API_KEY_2
    test = query_analyzer(query_all)
    # Bug fix: query_analyzer returns a Search instance, so read .queries
    # directly. The original regex-scraped str(test) with r"'([^']*)'",
    # which silently corrupts any query containing an apostrophe. The regex
    # is kept only as a fallback for unexpected return types.
    if hasattr(test, "queries"):
        matches = list(test.queries)
    else:
        matches = re.findall(r"'([^']*)'", str(test))
    vectordb = Chroma(persist_directory=f"./vector_database/{id}", embedding_function=embeddings)
    k = len(text_all)
    retriever = vectordb.as_retriever(search_kwargs={"k": k})
    compressor = CohereRerank(top_n=5, model="rerank-english-v2.0")
    compression_retriever = ContextualCompressionRetriever(base_compressor=compressor, base_retriever=retriever)
    results = []
    data_relevant = []
    sources = []
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {executor.submit(handle_query, query, text_all, compression_retriever, id, i): query
                   for i, query in enumerate(matches)}
        for future in as_completed(futures):
            try:
                result, list_data, list_source = future.result()
                results.append(result)
                data_relevant.append(list_data)
                sources.append(list_source)
            except Exception as e:
                # A failed sub-query is logged and skipped; the final answer
                # is built from whichever sub-answers succeeded.
                print(f'An error occurred: {e}')
    answer_all = ''.join(results)
    prompt1 = f"Dựa vào nội dung sau: {answer_all}. Hãy trả lời câu hỏi sau đây: {query_all}. Lưu ý rằng ngữ cảnh của cuộc trò chuyện này trước đây là: {chat_history}. Vui lòng trả lời câu hỏi mà không thay đổi, chỉnh sửa nội dung mà mình đã cung cấp."
    answer1 = question_answer(prompt1)
    return answer1, data_relevant, sources
|
function/dropbox.py
ADDED
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import dropbox.files
|
2 |
+
import os
|
3 |
+
import shutil
|
4 |
+
import requests, base64
|
5 |
+
from fastapi import HTTPException
|
6 |
+
from dotenv import load_dotenv
|
7 |
+
import os
|
8 |
+
load_dotenv()
|
9 |
+
DROPBOX_APP_KEY=os.getenv('DROPBOX_APP_KEY')
|
10 |
+
DROPBOX_APP_SECRET=os.getenv('DROPBOX_APP_SECRET')
|
11 |
+
DROPBOX_REFRESH_TOKEN=os.getenv('DROPBOX_REFRESH_TOKEN')
|
12 |
+
|
13 |
+
def refresh_token_dropbox():
    """Exchange the long-lived Dropbox refresh token for a fresh access token.

    Uses the OAuth2 token endpoint with HTTP Basic app credentials.
    Returns the access token string, or None when the response contains none.
    """
    app_key = DROPBOX_APP_KEY
    app_secret = DROPBOX_APP_SECRET
    refresh_token = DROPBOX_REFRESH_TOKEN
    url = 'https://api.dropbox.com/oauth2/token'
    auth_string = f"{app_key}:{app_secret}"
    base64authorization = base64.b64encode(auth_string.encode()).decode('utf-8')
    headers = {
        'Authorization': f'Basic {base64authorization}',
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    data = {
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token'
    }
    # Bug fix: requests.post without a timeout can block the caller forever
    # if Dropbox is unreachable.
    response = requests.post(url, headers=headers, data=data, timeout=30)
    response_json = response.json()
    access_token = response_json.get('access_token', None)
    return access_token
|
32 |
+
|
33 |
+
def delete_file(id, name_file):
    """Delete the file /<id>/<name_file> from Dropbox.

    API errors are logged, not raised.
    """
    # Bug fix: build the path before the try block so the except handler can
    # always reference it. Previously, a failure while fetching the token or
    # constructing the client raised NameError on file_path in the handler.
    file_path = f"/{id}/{name_file}"
    try:
        TOKEN = refresh_token_dropbox()
        dbx = dropbox.Dropbox(TOKEN)
        dbx.files_delete_v2(file_path)
        print(f"Xóa file '{file_path}' thành công.")
    except dropbox.exceptions.ApiError as e:
        print(f"Lỗi khi xóa file '{file_path}': {e}")
|
42 |
+
|
43 |
+
def list_files(id):
    """Return the names of all files directly inside Dropbox folder /<id>.

    Returns an empty list when the folder cannot be listed (error is logged).
    """
    names = []
    try:
        dbx = dropbox.Dropbox(refresh_token_dropbox())
        listing = dbx.files_list_folder(f"/{id}")
        names.extend(
            os.path.basename(item.path_display)
            for item in listing.entries
            if isinstance(item, dropbox.files.FileMetadata)
        )
    except dropbox.exceptions.ApiError as e:
        print(f"Error listing files: {e}")
    return names
|
55 |
+
|
56 |
+
def upload_file_fix(local_path, cloud_path, token):
    """Upload the file at *local_path* to Dropbox at *cloud_path*.

    The *token* parameter is accepted for signature compatibility but not
    used — a fresh access token is always fetched via refresh_token_dropbox().
    API errors are logged, not raised.
    """
    try:
        dbx = dropbox.Dropbox(refresh_token_dropbox())
        with open(local_path, "rb") as handle:
            payload = handle.read()
            dbx.files_upload(payload, cloud_path)
        print(f"Uploaded file '{local_path}' to '{cloud_path}'")
    except dropbox.exceptions.ApiError as e:
        print(f"Error uploading file '{local_path}': {e}")
|
66 |
+
|
67 |
+
def upload_file(local_path, cloud_path):
    """Upload *local_path* to Dropbox at *cloud_path*, retrying once through
    upload_file_fix on an API error."""
    try:
        TOKEN = refresh_token_dropbox()
        dbx = dropbox.Dropbox(TOKEN)
        with open(local_path, "rb") as f:
            data = f.read()
            dbx.files_upload(data, cloud_path)
        print(f"Uploaded file '{local_path}' to '{cloud_path}'")
    except dropbox.exceptions.ApiError as e:
        # Bug fix: the original retried with upload_file_fix() and no
        # arguments, which itself raised TypeError (it takes three). Pass
        # the paths through; the token argument is unused by the helper.
        upload_file_fix(local_path, cloud_path, None)
|
77 |
+
|
78 |
+
def clear_local_folder(path):
    """Remove every file, symlink and subdirectory inside *path*.

    The directory *path* itself is preserved. Any failure is logged and
    swallowed (best-effort cleanup).
    """
    try:
        for entry in os.listdir(path):
            target = os.path.join(path, entry)
            if os.path.isfile(target) or os.path.islink(target):
                os.unlink(target)
            elif os.path.isdir(target):
                shutil.rmtree(target)
    except Exception as e:
        print(f"Failed to delete contents of {path}. Reason: {e}")
|
88 |
+
|
89 |
+
def download_folder(id):
    """Mirror Dropbox folder /<id> into ./user_file/<id>.

    The local copy is created if needed and emptied before downloading.
    API errors are logged, not raised.
    """
    try:
        dbx = dropbox.Dropbox(refresh_token_dropbox())
        local_path = f"./user_file/{id}"
        os.makedirs(local_path, exist_ok=True)
        # Start from a clean slate so deleted cloud files don't linger locally.
        clear_local_folder(local_path)
        for entry in dbx.files_list_folder(f"/{id}").entries:
            if not isinstance(entry, dropbox.files.FileMetadata):
                continue  # skip sub-folders and other non-file entries
            cloud_file_path = entry.path_display
            file_name = os.path.basename(cloud_file_path)
            local_file_path = os.path.join(local_path, file_name)
            dbx.files_download_to_file(local_file_path, cloud_file_path)
            print(f"Downloaded file '{file_name}' to '{local_file_path}'")
    except dropbox.exceptions.ApiError as e:
        print(f"Error downloading file '{id}': {e}")
|
106 |
+
|
107 |
+
def download_file_id(file_name, id):
    """Download /<id>/<file_name> from Dropbox into ./user_file/<id>/.

    Raises HTTPException(500) when the Dropbox API call fails.
    """
    try:
        TOKEN = refresh_token_dropbox()
        dbx = dropbox.Dropbox(TOKEN)
        local_folder_path = f"./user_file/{id}"
        os.makedirs(local_folder_path, exist_ok=True)
        local_file_path = os.path.join(local_folder_path, file_name)
        # Bug fix: download first, then write. The original opened (and
        # truncated/created) the local file before the download, so an API
        # failure left a spurious empty file behind.
        metadata, response = dbx.files_download(f"/{id}/{file_name}")
        with open(local_file_path, "wb") as f:
            f.write(response.content)
        print(f"Downloaded file '{file_name}' to '{local_file_path}'")
    except dropbox.exceptions.ApiError as e:
        print(f"Error downloading file '{file_name}': {e}")
        raise HTTPException(status_code=500, detail="Internal Server Error")
|
121 |
+
|
122 |
+
def search_and_download_file(start_char, id):
    """Download the first file in Dropbox folder /<id> whose name starts
    with *start_char* into ./user_file/<id>/.

    Returns None (with a log line) when no name matches; raises
    HTTPException(500) on a Dropbox API error.
    """
    try:
        dbx = dropbox.Dropbox(refresh_token_dropbox())
        listing = dbx.files_list_folder(f"/{id}")
        candidates = [entry.name for entry in listing.entries if entry.name.startswith(start_char)]
        if len(candidates) == 0:
            print(f"No file found starting with '{start_char}' in folder '{id}'")
            return
        file_name = candidates[0]
        local_folder_path = f"./user_file/{id}"
        os.makedirs(local_folder_path, exist_ok=True)
        local_file_path = os.path.join(local_folder_path, file_name)
        with open(local_file_path, "wb") as f:
            metadata, response = dbx.files_download(f"/{id}/{file_name}")
            f.write(response.content)
        print(f"Downloaded file '{file_name}' to '{local_file_path}'")
    except dropbox.exceptions.ApiError as e:
        print(f"Error searching or downloading file: {e}")
        raise HTTPException(status_code=500, detail="Internal Server Error")
|
142 |
+
|
143 |
+
def delete_all_files_in_folder(folder_id):
    """Delete every file directly inside Dropbox folder /<folder_id>.

    Sub-folders are left untouched. API errors are logged, not raised.
    """
    try:
        dbx = dropbox.Dropbox(refresh_token_dropbox())
        for entry in dbx.files_list_folder(f"/{folder_id}").entries:
            if isinstance(entry, dropbox.files.FileMetadata):
                file_path = entry.path_display
                dbx.files_delete_v2(file_path)
                print(f"Deleted file '{file_path}'")
        print(f"All files in folder '{folder_id}' have been deleted.")
    except dropbox.exceptions.ApiError as e:
        print(f"Error deleting files: {e}")
|
function/support_function.py
ADDED
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pydantic.error_wrappers import ErrorWrapper
|
2 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
3 |
+
from service import MySQLService,UserService,ChatService
|
4 |
+
from request import RequestMySQL,RequestUser,RequestDefault
|
5 |
+
from auth.authentication import decodeJWT
|
6 |
+
from repository import UserRepository
|
7 |
+
from auth import authentication
|
8 |
+
from datetime import datetime, timedelta
|
9 |
+
from fastapi import Depends, HTTPException, Form, File, UploadFile
|
10 |
+
from typing import List
|
11 |
+
from service import FileService,DefaultService,UserService
|
12 |
+
from request import RequestFile,RequestChat,RequestDefault
|
13 |
+
from fastapi import FastAPI, Request, HTTPException
|
14 |
+
from fastapi.responses import JSONResponse
|
15 |
+
from pydantic.error_wrappers import ErrorWrapper
|
16 |
+
import json
|
17 |
+
from function import support_function
|
18 |
+
from repository import UserRepository
|
19 |
+
from response import ResponseDefault as res
|
20 |
+
import re
|
21 |
+
|
22 |
+
def is_positive_integer(value):
    """Return True when *value* is an int strictly greater than zero."""
    return isinstance(value, int) and value > 0
|
27 |
+
|
28 |
+
def check_value_user_id_controller(user_id: str):
    """Validate a user_id string: required, integer-valued, strictly positive.

    Returns True on success, otherwise a 400 ReponseError describing the
    first failed check. Surrounding quotes are stripped before parsing.
    """
    if user_id is None or not user_id.strip():
        return res.ReponseError(status=400,
                                data=res.Message(message="user_id field is required."))
    cleaned = user_id.strip("'").strip('"')
    try:
        parsed = int(cleaned)
    except ValueError:
        return res.ReponseError(status=400,
                                data=res.Message(message="user_id must be an integer"))
    if not support_function.is_positive_integer(parsed):
        return res.ReponseError(status=400,
                                data=res.Message(message="user_id must be greater than 0"))
    return True
|
43 |
+
|
44 |
+
def check_value_user_id(user_id: str, current_user_email: str):
    """Validate *user_id* and verify it belongs to the authenticated user.

    Returns True on success; a 400/404 ReponseError for malformed or unknown
    ids; raises HTTPException(403) when the id belongs to another user.
    """
    if user_id is None or not user_id.strip():
        return res.ReponseError(status=400,
                                data=res.Message(message="user_id field is required."))
    cleaned = user_id.strip("'").strip('"')
    try:
        parsed = int(cleaned)
    except ValueError:
        return res.ReponseError(status=400,
                                data=res.Message(message="user_id must be an integer"))
    if not support_function.is_positive_integer(parsed):
        return res.ReponseError(status=400,
                                data=res.Message(message="user_id must be greater than 0"))
    row = UserRepository.getEmailUserByIdFix(cleaned)
    if row is None:
        return res.ReponseError(status=404,
                                data=res.Message(message="user_id not exist"))
    if row[0] != current_user_email:
        raise HTTPException(status_code=403, detail="Sorry, you can't perform actions with this user id.")
    return True
|
66 |
+
|
67 |
+
def check_value_email_controller(email: str):
    """Reject a missing/blank email or one that parses as a bare integer.

    Returns True on success, otherwise a 400 ReponseError.
    """
    if email is None or not email.strip():
        return res.ReponseError(status=400,
                                data=res.Message(message="Email is required."))
    try:
        int(email)
    except ValueError:
        # Not a bare number: accepted by this controller-level check.
        return True
    return res.ReponseError(status=400,
                            data=res.Message(message="Email must be a string, not a number."))
|
78 |
+
|
79 |
+
def check_value_otp(otp: str):
    """Validate an OTP value: present, non-numeric, exactly 6 characters.

    Returns True on success, otherwise a 400 ReponseError.
    """
    if otp is None:
        return res.ReponseError(status=400,
                                data=res.Message(message="OTP is required"))
    # NOTE(review): this rejects all-digit OTPs (e.g. "123456"), the most
    # common OTP shape — confirm this check is not inverted.
    if otp.isdigit():
        return res.ReponseError(status=400,
                                data=res.Message(message="OTP must be a string, not a number."))
    # NOTE(review): the message says "max length" but the check requires
    # exactly 6 characters (shorter values are rejected too).
    if len(otp) != 6:
        return res.ReponseError(status=400,
                                data=res.Message(message="OTP max length is 6"))
    return True
|
90 |
+
|
91 |
+
# Module-level address pattern used by check_email.
# NOTE(review): the `|` inside [A-Z|a-z] is a literal pipe character, not
# alternation — TLDs containing '|' would be accepted; confirm intended.
regex = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,7}\b'
def check_email(email):
    """Return True iff *email* fully matches the module-level address pattern."""
    return re.fullmatch(regex, email) is not None
|
97 |
+
def check_email_service(user_id: str):
    """Resolve *user_id* to its stored email and validate it.

    Returns the email string on success; otherwise a ReponseError
    (404 when the id is unknown, 400 when the email is empty or malformed).
    """
    row = UserRepository.getEmailUserByIdFix(user_id)
    if row is None:
        return res.ReponseError(
            status=404,
            data=res.Message(message="Id not exist")
        )
    email = row[0]
    if email is None:
        return res.ReponseError(
            status=400,
            data=res.Message(message="Email is empty")
        )
    if not check_email(email):
        return res.ReponseError(
            status=400,
            data=res.Message(message="Email invalid")
        )
    return email
|
116 |
+
|
117 |
+
def check_email_empty_invalid(email: str):
    """Return True when *email* is non-empty and well-formed, otherwise a
    400 ReponseError."""
    if not email:
        return res.ReponseError(
            status=400,
            data=res.Message(message="Email is empty")
        )
    if not check_email(email):
        return res.ReponseError(
            status=400,
            data=res.Message(message="Email invalid")
        )
    return True
|
models/Database_Entity.py
ADDED
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy import Column, String, Text, DateTime, Integer, ForeignKey, TIMESTAMP
|
2 |
+
from sqlalchemy.orm import relationship
|
3 |
+
from sqlalchemy.orm import DeclarativeBase
|
4 |
+
from sqlalchemy.sql import func
|
5 |
+
class Base(DeclarativeBase):
    """Shared declarative base for all ORM models in this module."""
    pass

class User(Base):
    """OAuth account record; other tables reference it by email.

    NOTE(review): users.email is the target of several ForeignKey columns
    below but is not declared unique here — confirm the schema enforces
    uniqueness at the database level.
    """
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True, autoincrement=True)
    email = Column(String(255))
    # OAuth tokens and their expiry for this account.
    access_token = Column(Text)
    refresh_token = Column(Text)
    expires_at = Column(DateTime)

    chat_histories = relationship("ChatHistory", back_populates="user")
    user_logins = relationship("UserLogin", back_populates="user")
    user_infos = relationship("UserInfo", back_populates="user")

class ChatHistory(Base):
    """A named conversation belonging to a user (joined via email)."""
    __tablename__ = 'chat_history'

    id = Column(Integer, primary_key=True,autoincrement=True)
    email = Column(String(255), ForeignKey('users.email'))
    # NOTE(review): name_chat is globally unique, so two different users
    # cannot reuse the same chat name — confirm intended.
    name_chat = Column(String(255), unique=True)

    user = relationship("User", back_populates="chat_histories")
    detail_chats = relationship("DetailChat", back_populates="chat_history")

class UserLogin(Base):
    """Active session identifiers per user.

    NOTE(review): three columns carry primary_key=True, producing a
    composite primary key (id, user_email, user_session_id) — confirm this
    is intended rather than id alone.
    """
    __tablename__ = 'user_login'

    id = Column(Integer, primary_key=True,autoincrement=True)
    user_email = Column(String(100), ForeignKey('users.email'), primary_key=True)
    user_session_id = Column(String(100), primary_key=True)

    user = relationship("User", back_populates="user_logins")

class UserInfo(Base):
    """Profile data (display name, avatar) for a user, one row per email."""
    __tablename__ = 'user_info'

    id = Column(Integer, primary_key=True, autoincrement=True)
    uid = Column(Text)
    email = Column(String(255), ForeignKey('users.email'), unique=True)
    display_name = Column(Text)
    photo_url = Column(Text)

    user = relationship("User", back_populates="user_infos")

class DetailChat(Base):
    """One question/answer exchange inside a ChatHistory conversation."""
    __tablename__ = 'detail_chat'

    id = Column(Integer, primary_key=True, autoincrement=True)
    chat_id = Column(Integer, ForeignKey('chat_history.id'))
    # User's message and the assistant's reply.
    YouMessage = Column(Text)
    AiMessage = Column(Text)
    # Supporting retrieved context and its originating file.
    data_relevant = Column(Text)
    source_file = Column(Text)

    chat_history = relationship("ChatHistory", back_populates="detail_chats")

class OTP(Base):
    """One-time password issued to an email, timestamped at creation."""
    __tablename__ = 'otp'

    id = Column(Integer, primary_key=True, autoincrement=True)
    email = Column(String(255), nullable=False)
    otp = Column(String(6), nullable=False)
    created_at = Column(TIMESTAMP, server_default=func.now())
|
models/__pycache__/Database_Entity.cpython-310.pyc
ADDED
Binary file (2.81 kB). View file
|
|
models/__pycache__/Database_Entity.cpython-311.pyc
ADDED
Binary file (4.77 kB). View file
|
|
models/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (168 Bytes). View file
|
|
repository/ChatHistoryRepository.py
ADDED
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy.orm import sessionmaker
|
2 |
+
import sys
|
3 |
+
import os
|
4 |
+
app_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
5 |
+
sys.path.insert(0, app_path)
|
6 |
+
from models import Database_Entity
|
7 |
+
from repository import ConfigDatabase as cf
|
8 |
+
chat_history = Database_Entity.ChatHistory
|
9 |
+
users = Database_Entity.User
|
10 |
+
detail_chat = Database_Entity.DetailChat
|
11 |
+
from sqlalchemy.orm import sessionmaker
|
12 |
+
from functools import lru_cache
|
13 |
+
import sys
|
14 |
+
import os
|
15 |
+
|
16 |
+
def getIdChatHistoryByUserIdAndNameChat(user_id:int,name_old :str) -> chat_history.id:
    """Return the chat_history.id of the chat named *name_old* owned by user
    *user_id*, or None when no such chat exists.

    NOTE(review): the bare except retries the whole lookup against the
    secondary engine (get_db_engine1) on ANY error — including a missing
    user, where one_or_none() returns None and the [0] subscript raises
    TypeError. Confirm this failover is intended rather than masking bugs.
    This function is duplicated verbatim as
    getIdChatHistoryByUserIdAndNameChatNew.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            # one_or_none() may be None -> [0] raises, triggering the fallback.
            email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
            chat_id = session.query(chat_history.id).filter(chat_history.email == email, chat_history.name_chat == name_old).scalar()
            session.commit()  # read-only query; commit kept from original
            if chat_id:
                session.close()  # redundant inside `with`, kept from original
                return chat_id
            else:
                session.close()
                return None
    except:
        # Fallback path: identical lookup against the secondary engine.
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
            chat_id = session.query(chat_history.id).filter(chat_history.email == email, chat_history.name_chat == name_old).scalar()
            session.commit()
            if chat_id:
                session.close()
                return chat_id
            else:
                session.close()
                return None
|
43 |
+
|
44 |
+
def getIdChatHistoryByUserIdAndNameChatNew(user_id:int,name_old :str) -> chat_history.id:
    """Return the chat_history.id of the chat named ``name_old`` owned by ``user_id``.

    NOTE(review): functionally identical to getIdChatHistoryByUserIdAndNameChat;
    candidate for consolidation. Returns None when no matching chat exists.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            # one_or_none()[0] raises TypeError for an unknown user; the bare
            # except below converts that into a retry on the secondary engine.
            email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
            chat_id = session.query(chat_history.id).filter(chat_history.email == email, chat_history.name_chat == name_old).scalar()
            session.commit()
            if chat_id:
                session.close()  # redundant: the with-block already closes the session
                return chat_id
            else:
                session.close()
                return None
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
            chat_id = session.query(chat_history.id).filter(chat_history.email == email,
                                                            chat_history.name_chat == name_old).scalar()
            session.commit()
            if chat_id:
                session.close()
                return chat_id
            else:
                session.close()
                return None
|
72 |
+
|
73 |
+
def updateNameChatHistory(user_id: int,name_old :str,name_new:str) -> bool:
    """Rename ``user_id``'s chat from ``name_old`` to ``name_new``.

    Returns True on success, False on failure. Note: unlike most sibling
    functions, this one tries the *secondary* engine first and falls back to
    the primary one.
    """
    try:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            try:
                email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
                session.query(chat_history).filter(chat_history.email == email,chat_history.name_chat == name_old).update({chat_history.name_chat: name_new})
                session.commit()
                session.close()  # redundant: the with-block already closes the session
                return True
            except:
                # Any failure (including an unknown user -> TypeError) rolls back.
                session.rollback()
                session.close()
                return False
    except:
        # Only reached when engine/sessionmaker construction itself fails.
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            try:
                email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
                session.query(chat_history).filter(chat_history.email == email,chat_history.name_chat == name_old).update({chat_history.name_chat: name_new})
                session.commit()
                session.close()
                return True
            except:
                session.rollback()
                session.close()
                return False
|
102 |
+
|
103 |
+
def deleteChatHistory(user_id,chat_name: str) -> bool:
    """Delete every chat_history row named ``chat_name`` owned by ``user_id``.

    Returns True on success, False on any failure. The secondary engine is
    tried first, then the primary one (preserving the original order).

    Fixes from the original: the except handlers referenced ``session``
    before it was guaranteed to exist (NameError when engine construction
    failed), and a failure of both engines raised instead of returning False.
    """
    for engine_factory in (cf.get_db_engine1, cf.get_db_engine):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        except Exception:
            # Engine/sessionmaker construction failed: try the next engine.
            continue
        with Session() as session:
            try:
                row = session.query(users.email).filter(users.id == user_id).one_or_none()
                if row is None:
                    # Unknown user: nothing to delete.
                    return False
                email = row[0]
                session.query(chat_history).filter(
                    chat_history.email == email,
                    chat_history.name_chat == chat_name,
                ).delete()
                session.commit()
                return True
            except Exception:
                # Query/delete failure: undo and report, matching the
                # original's no-retry behaviour for in-session errors.
                session.rollback()
                return False
    return False
|
132 |
+
|
133 |
+
|
134 |
+
def getChatHistoryByEmail(email: str) -> chat_history:
    """Return a Query of chat_history rows for ``email`` (secondary engine first).

    NOTE(review): the returned Query is built inside a session that is closed
    before the caller iterates it — confirm callers consume it while the
    connection is still usable.
    """
    try:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            chat_history1 = session.query(chat_history).filter(chat_history.email == email)
            # A Query object is always truthy, so this branch is always taken
            # and the trailing `return None` is unreachable.
            if chat_history1:
                session.commit()
                session.close()
                return chat_history1
            session.close()
            return None
    except:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            chat_history1 = session.query(chat_history).filter(chat_history.email == email)
            if chat_history1:
                session.commit()
                session.close()
                return chat_history1
            session.close()
            return None
|
157 |
+
|
158 |
+
from sqlalchemy.orm import aliased
|
159 |
+
|
160 |
+
def delete_last_chat_detail_by_chat_name_and_email(chat_name: str, user_id: int) -> bool:
    """Delete the newest detail_chat row (highest id) of ``user_id``'s chat ``chat_name``.

    Returns True when a row was deleted, False otherwise. Tries the primary
    engine first, then the secondary one.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == user_id).one_or_none()
            if not email:
                # Unknown user: nothing to delete.
                return False
            email = email[0]
            # Newest detail row for this chat, found via the chat_history join.
            last_chat_detail = (session.query(detail_chat)
                                .join(chat_history, detail_chat.chat_id == chat_history.id)
                                .filter(chat_history.name_chat == chat_name, chat_history.email == email)
                                .order_by(detail_chat.id.desc())
                                .first())

            if last_chat_detail:
                session.delete(last_chat_detail)
                session.commit()
                return True
            return False
    except:
        # Fallback: identical logic against the secondary engine.
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == user_id).one_or_none()
            if not email:
                return False
            email = email[0]
            last_chat_detail = (session.query(detail_chat)
                                .join(chat_history, detail_chat.chat_id == chat_history.id)
                                .filter(chat_history.name_chat == chat_name, chat_history.email == email)
                                .order_by(detail_chat.id.desc())
                                .first())

            if last_chat_detail:
                session.delete(last_chat_detail)
                session.commit()
                return True
            return False
|
199 |
+
def getChatHistoryByChatIdAndUserId(chat_id: int, user_id: int) -> chat_history:
    """Return True when chat ``chat_id`` belongs to ``user_id``, else None.

    (Return values kept as True/None for caller compatibility, despite the
    historical ``-> chat_history`` annotation.)

    Fixes from the original: the fallback branch filtered ``users.id == id``
    against the *builtin* ``id`` function instead of ``user_id`` and ran a
    different query than the primary branch; both branches now perform the
    same ownership check.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
            with Session() as session:
                row = session.query(users.email).filter(users.id == user_id).one_or_none()
                if row is None:
                    return None
                email = row[0]
                record = session.query(chat_history).filter(
                    chat_history.id == chat_id,
                    chat_history.email == email,
                ).one_or_none()
                return True if record else None
        except Exception:
            # Any failure on this engine: retry once on the other one.
            continue
    return None
|
224 |
+
|
225 |
+
|
226 |
+
def getChatHistoryById(id: int) -> chat_history:
    """Return a Query of chat_history rows for the user with primary key ``id``.

    NOTE(review): the parameter shadows the ``id`` builtin and is a *user* id,
    not a chat id — the function name is misleading. one_or_none()[0] raises
    TypeError for an unknown user, which the bare except turns into a retry
    on the secondary engine.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == id).one_or_none()[0]
            chat_history1 = session.query(chat_history).filter(chat_history.email == email)
            # A Query object is always truthy, so the trailing return None
            # is unreachable.
            if chat_history1:
                session.commit()
                session.close()
                return chat_history1
            session.close()
            return None
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == id).one_or_none()[0]
            chat_history1 = session.query(chat_history).filter(chat_history.email == email)
            if chat_history1:
                session.commit()
                session.close()
                return chat_history1
            session.close()
            return None
|
251 |
+
|
252 |
+
def addChatHistory(user_id: str, name_chat:str)->None:
    """Insert a new chat_history row named ``name_chat`` for ``user_id``.

    Resolves the user's email first; an unknown user makes one_or_none()[0]
    raise TypeError, which the bare except converts into a retry on the
    secondary engine (where it would propagate).
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
            new_user = chat_history(
                email = email,
                name_chat = name_chat
            )
            session.add(new_user)
            session.commit()
            session.close()  # redundant: the with-block already closes the session
    except:
        # Fallback: identical insert against the secondary engine.
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == user_id).one_or_none()[0]
            new_user = chat_history(
                email = email,
                name_chat = name_chat
            )
            session.add(new_user)
            session.commit()
            session.close()
|
repository/ConfigDatabase.py
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy import create_engine, URL
from sqlalchemy.orm import DeclarativeBase
# NOTE(review): DeclarativeBase is normally subclassed, not instantiated —
# this object appears unused; confirm before removing.
Base = DeclarativeBase()
from sqlalchemy.engine import create_engine, URL
from dotenv import load_dotenv
import os

# Connection settings come from the environment (.env supported via dotenv).
load_dotenv()
MYSQL_USER_NAME = os.getenv('MYSQL_USER_NAME')
# Fix: the password was read under the misspelled key 'MYSQL_PASSWOR', so a
# correctly named MYSQL_PASSWORD variable was silently ignored. Prefer the
# correct key but keep the old one as a fallback for existing deployments.
MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', os.getenv('MYSQL_PASSWOR'))
MYSQL_PORT = os.getenv('MYSQL_PORT')
MYSQL_DATABASE = os.getenv('MYSQL_DATABASE')
MYSQL_HOST = os.getenv('MYSQL_HOST')
#IF USE DOCKER HOST = host.docker.internal
|
14 |
+
def get_db_engine():
    """Build a pooled SQLAlchemy engine for the configured MySQL database.

    Connection parameters come from the module-level MYSQL_* settings;
    the pool recycles connections every 300s and pings before checkout.
    """
    url = URL.create(
        drivername="mysql+pymysql",
        username=MYSQL_USER_NAME,
        password=MYSQL_PASSWORD,
        host=MYSQL_HOST,
        port=MYSQL_PORT,
        database=MYSQL_DATABASE,
    )
    return create_engine(
        url,
        connect_args={},
        pool_size=20,
        pool_recycle=300,
        pool_pre_ping=True,
    )
|
31 |
+
|
32 |
+
def get_db_engine1():
    """Build the secondary (fallback) MySQL engine.

    NOTE: currently identical to get_db_engine(); kept as a separate entry
    point because callers use it as a retry target.
    """
    url = URL.create(
        drivername="mysql+pymysql",
        username=MYSQL_USER_NAME,
        password=MYSQL_PASSWORD,
        host=MYSQL_HOST,
        port=MYSQL_PORT,
        database=MYSQL_DATABASE,
    )
    return create_engine(
        url,
        connect_args={},
        pool_size=20,
        pool_recycle=300,
        pool_pre_ping=True,
    )
|
repository/DetailChatRepository.py
ADDED
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy.orm import sessionmaker
|
2 |
+
from models import Database_Entity
|
3 |
+
from repository import ConfigDatabase as cf
|
4 |
+
detail_chat = Database_Entity.DetailChat
|
5 |
+
chat_history = Database_Entity.ChatHistory
|
6 |
+
|
7 |
+
def getListDetailChatByChatId(chat_id: int) -> detail_chat:
    """Return a Query of detail_chat rows for ``chat_id`` (secondary engine first).

    NOTE(review): the returned Query is tied to a session closed on exit —
    confirm callers consume it while the connection is still usable. A Query
    object is always truthy, so the else branch never runs.
    """
    try:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            chat_record= session.query(detail_chat).filter(detail_chat.chat_id == chat_id)
            session.commit()
            if chat_record:
                session.close()  # redundant: the with-block already closes the session
                return chat_record
            else:
                session.close()
                return None
    except:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            chat_record= session.query(detail_chat).filter(detail_chat.chat_id == chat_id)
            session.commit()
            if chat_record:
                session.close()
                return chat_record
            else:
                session.close()
                return None
|
32 |
+
|
33 |
+
def addDetailChat(chat_id: int, YouMessage: str, AiMessage: str, data_relevant: str, source_file: str) -> int:
    """Insert a new detail_chat row and return its generated primary key.

    Tries the primary engine first; any failure triggers one retry against
    the secondary engine. (The original annotated ``-> None`` but actually
    returned the id, and had an unreachable session.close() after the
    return — the ``with`` block already closes the session.)
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            record = detail_chat(
                chat_id=chat_id,
                YouMessage=YouMessage,
                AiMessage=AiMessage,
                data_relevant=data_relevant,
                source_file=source_file,
            )
            session.add(record)
            session.commit()
            # The id is populated by the flush performed during commit.
            return record.id
    except Exception:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            record = detail_chat(
                chat_id=chat_id,
                YouMessage=YouMessage,
                AiMessage=AiMessage,
                data_relevant=data_relevant,
                source_file=source_file,
            )
            session.add(record)
            session.commit()
            return record.id
|
64 |
+
|
65 |
+
def getDetailChatByChatId(id: int) -> detail_chat:
    """Return the detail_chat row with primary key ``id``, or False on error.

    NOTE(review): the two branches return different shapes — the primary
    branch returns the full entity, the fallback only the
    (id, data_relevant, source_file) columns; confirm callers only rely on
    those fields. The parameter shadows the ``id`` builtin.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            try:
                chat = session.query(detail_chat).filter(detail_chat.id == id).one_or_none()
                return chat
            except:
                session.close()
                return False
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            try:
                # Column-tuple variant: returns a Row, not the mapped entity.
                chat = session.query(detail_chat.id,detail_chat.data_relevant,detail_chat.source_file).filter(detail_chat.id == id).one_or_none()
                session.commit()
                session.close()  # redundant: the with-block already closes the session
                return chat
            except:
                session.close()
                return False
|
88 |
+
|
89 |
+
|
90 |
+
def delete_chat_detail(chat_name: str) -> bool:
    """Delete all detail_chat rows belonging to chats named ``chat_name``.

    Returns True on success, False on any failure. Tries the primary engine
    first, then the secondary one.

    Fix from the original: the bulk ``delete()`` filtered ``detail_chat``
    directly against ``chat_history`` columns with no join — multi-table
    criteria are not supported by Query.delete(). The chat ids are now
    resolved through a subquery on chat_history instead.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        except Exception:
            continue
        with Session() as session:
            try:
                # Ids of every chat carrying this name.
                chat_ids = session.query(chat_history.id).filter(
                    chat_history.name_chat == chat_name
                )
                session.query(detail_chat).filter(
                    detail_chat.chat_id.in_(chat_ids)
                ).delete(synchronize_session=False)
                session.commit()
                return True
            except Exception:
                # Matches the original: an in-session failure returns False
                # without retrying on the other engine.
                return False
    return False
|
116 |
+
def delete_chat_detail_by_id(id_chat_detail: int) -> bool:
    """Delete the detail_chat row with primary key ``id_chat_detail``.

    Returns True on success, False otherwise. Fix from the original: the
    fallback branch filtered on ``chat_id`` instead of ``id``, so a retry
    would have deleted every detail row of a whole chat; both branches now
    filter on the primary key.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        except Exception:
            continue
        with Session() as session:
            try:
                session.query(detail_chat).filter(
                    detail_chat.id == id_chat_detail
                ).delete(synchronize_session=False)
                session.commit()
                return True
            except Exception:
                # In-session failure: report False without retrying,
                # matching the original behaviour.
                return False
    return False
|
repository/OTPRepository.py
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy.orm import sessionmaker
|
2 |
+
from models import Database_Entity
|
3 |
+
from repository import ConfigDatabase as cf
|
4 |
+
otp_user = Database_Entity.OTP
|
5 |
+
from sqlalchemy.orm import sessionmaker
|
6 |
+
from functools import lru_cache
|
7 |
+
import sys
|
8 |
+
import os
|
9 |
+
|
10 |
+
def getOtpByEmail(email: str) -> otp_user:
    """Return the OTP row stored for ``email``, or None when there is none.

    Tries the primary engine first; any error triggers one retry against the
    secondary engine.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_record= session.query(otp_user).filter(otp_user.email == email).one_or_none()
            if user_record:
                session.close()  # redundant: the with-block already closes the session
                return user_record
            else:
                session.close()
                return None
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_record= session.query(otp_user).filter(otp_user.email == email).one_or_none()
            if user_record:
                session.close()
                return user_record
            else:
                session.close()
                return None
|
33 |
+
|
34 |
+
def addOTP(email: str, otp: str) -> None:
    """Store ``otp`` for ``email``, replacing any previously stored OTP.

    Any existing row for the email is deleted first so at most one OTP
    exists per address. Falls back to the secondary engine on any error.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            # Drop the previous OTP for this email, if any.
            otp_record = session.query(otp_user).filter_by(email=email).first()
            if otp_record:
                session.delete(otp_record)
                session.commit()
            new_user = otp_user(
                email = email,
                otp= otp
            )
            session.add(new_user)
            session.commit()
            session.close()  # redundant: the with-block already closes the session
    except:
        # Fallback: identical logic against the secondary engine.
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            otp_record = session.query(otp_user).filter_by(email=email).first()
            if otp_record:
                session.delete(otp_record)
                session.commit()
            new_user = otp_user(
                email = email,
                otp= otp
            )
            session.add(new_user)
            session.commit()
            session.close()
|
65 |
+
|
66 |
+
def deleteOTP(email: str, otp:str) -> None:
    """Delete the stored row matching both ``email`` and ``otp``, if present.

    A no-op when no matching row exists. Falls back to the secondary engine
    on any error.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            otp_record = session.query(otp_user).filter_by(email=email, otp=otp).first()
            if otp_record:
                session.delete(otp_record)
                session.commit()
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            otp_record = session.query(otp_user).filter_by(email=email, otp=otp).first()
            if otp_record:
                session.delete(otp_record)
                session.commit()
|
repository/UserInfoRepository.py
ADDED
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy.orm import sessionmaker
|
2 |
+
from models import Database_Entity
|
3 |
+
from repository import ConfigDatabase as cf
|
4 |
+
user_info = Database_Entity.UserInfo
|
5 |
+
users = Database_Entity.User
|
6 |
+
from sqlalchemy.orm import sessionmaker
|
7 |
+
import sys
|
8 |
+
import os
|
9 |
+
|
10 |
+
def getUserInfo(user_id: int) -> user_info:
    """Return the user_info row for the user with primary key ``user_id``.

    Returns None when the user or their info row does not exist.

    Fix from the original: the primary branch referenced ``session`` without
    ever creating an engine or session (a guaranteed NameError), so every
    call silently ran on the secondary engine; the primary branch now builds
    its own session like its siblings.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
            with Session() as session:
                row = session.query(users.email).filter(users.id == user_id).one_or_none()
                if not row:
                    return None
                user_record = session.query(user_info).filter(
                    user_info.email == row[0]
                ).one_or_none()
                return user_record if user_record else None
        except Exception:
            # Any failure on this engine: retry once on the other one.
            continue
    return None
|
36 |
+
|
37 |
+
def getUserInfoByEmail(email:str) -> user_info:
    """Return the user_info row for ``email``, or None when there is none.

    Tries the primary engine first; any error triggers one retry against the
    secondary engine.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_record= session.query(user_info).filter(user_info.email == email).one_or_none()
            if user_record:
                session.close()  # redundant: the with-block already closes the session
                return user_record
            else:
                session.close()
                return None
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_record= session.query(user_info).filter(user_info.email == email).one_or_none()
            if user_record:
                session.close()
                return user_record
            else:
                session.close()
                return None
|
60 |
+
|
61 |
+
|
62 |
+
|
63 |
+
def addUserInfo(uid: str, email: str, display_name: str, photo_url: str) -> None:
    """Insert a new user_info row with the given profile fields.

    Falls back to the secondary engine when the first insert attempt fails.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            new_user = user_info(
                uid = uid,
                email = email,
                display_name = display_name,
                photo_url = photo_url
            )
            session.add(new_user)
            session.commit()
            session.close()  # redundant: the with-block already closes the session
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            new_user = user_info(
                uid = uid,
                email = email,
                display_name = display_name,
                photo_url = photo_url
            )
            session.add(new_user)
            session.commit()
            session.close()
|
90 |
+
|
91 |
+
def updateUserInfo(user_id, uid: str, email: str, display_name: str, photo_url: str) -> None:
    """Update uid, display_name and photo_url on ``user_id``'s user_info row.

    The ``email`` parameter is kept for interface compatibility but the row
    is located through the email stored for ``user_id``. A no-op when the
    user or their info row is missing.

    Fixes from the original: the looked-up email was a Row tuple compared
    directly against user_info.email (the filter could never match), and
    trailing commas turned the uid/display_name assignments into
    one-element tuples.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
            with Session() as session:
                row = session.query(users.email).filter(users.id == user_id).one_or_none()
                if row is None:
                    return
                user_update = session.query(user_info).filter(
                    user_info.email == row[0]
                ).one_or_none()
                if user_update is not None:
                    user_update.uid = uid
                    user_update.display_name = display_name
                    user_update.photo_url = photo_url
                    session.commit()
                return
        except Exception:
            # Any failure on this engine: retry once on the other one.
            continue
|
116 |
+
|
117 |
+
|
118 |
+
def updateImage(user_id, photo_url: str) -> None:
    """Set the photo_url on ``user_id``'s user_info row.

    A no-op when the user or their info row is missing.

    Fix from the original: the looked-up email was a Row tuple compared
    directly against user_info.email, so the filter could never match and
    the image was never updated; the tuple is now unwrapped.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
            with Session() as session:
                row = session.query(users.email).filter(users.id == user_id).one_or_none()
                if row is None:
                    return
                user_update = session.query(user_info).filter(
                    user_info.email == row[0]
                ).one_or_none()
                if user_update is not None:
                    user_update.photo_url = photo_url
                    session.commit()
                return
        except Exception:
            # Any failure on this engine: retry once on the other one.
            continue
|
139 |
+
|
repository/UserLoginRepository.py
ADDED
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from sqlalchemy.orm import sessionmaker
|
2 |
+
from models import Database_Entity
|
3 |
+
from repository import ConfigDatabase as cf
|
4 |
+
user_login = Database_Entity.UserLogin
|
5 |
+
users = Database_Entity.User
|
6 |
+
|
7 |
+
def getUserLogin(email: str) -> user_login:
    """Return the user_login row for ``email``, or None when there is none.

    Tries the primary engine first; any error triggers one retry against the
    secondary engine.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_record = session.query(user_login).filter(user_login.user_email == email).one_or_none()
            if user_record:
                session.close()  # redundant: the with-block already closes the session
                return user_record
            else:
                session.close()
                return None
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_record = session.query(user_login).filter(user_login.user_email == email).one_or_none()
            if user_record:
                session.close()
                return user_record
            else:
                session.close()
                return None
|
30 |
+
|
31 |
+
|
32 |
+
def getUserLoginById(id: int) -> user_login:
    """Return the user_login row for the user with primary key ``id``.

    NOTE(review): the parameter shadows the ``id`` builtin. one_or_none()[0]
    raises TypeError for an unknown user; the bare except converts that into
    a retry on the secondary engine.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == id).one_or_none()[0]
            user_record = session.query(user_login).filter(user_login.user_email == email).one_or_none()
            if user_record:
                session.close()  # redundant: the with-block already closes the session
                return user_record
            else:
                session.close()
                return None
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            email = session.query(users.email).filter(users.id == id).one_or_none()[0]
            user_record = session.query(user_login).filter(user_login.user_email == email).one_or_none()
            if user_record:
                session.close()
                return user_record
            else:
                session.close()
                return None
|
57 |
+
|
58 |
+
def addUserLogin(user_email: str, session_id : str) -> None:
    """Insert a new user_login row mapping ``user_email`` to ``session_id``.

    Falls back to the secondary engine when the first insert attempt fails.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            new_user = user_login(
                user_email = user_email,
                user_session_id = session_id
            )
            session.add(new_user)
            session.commit()
            session.close()  # redundant: the with-block already closes the session
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            new_user = user_login(
                user_email = user_email,
                user_session_id = session_id
            )
            session.add(new_user)
            session.commit()
            session.close()
|
81 |
+
|
82 |
+
|
83 |
+
def updateUserLogin(email: str, session_id : str ) -> None:
    """Replace the stored session id for ``email``.

    A no-op when no user_login row exists for the email. Falls back to the
    secondary engine on any error.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update= session.query(user_login).filter(user_login.user_email == email).one_or_none()
            if user_update is not None:
                user_update.user_session_id = session_id
                session.commit()
                session.close()  # redundant: the with-block already closes the session
    except:
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update= session.query(user_login).filter(user_login.user_email == email).one_or_none()
            if user_update is not None:
                user_update.user_session_id = session_id
                session.commit()
                session.close()
|
102 |
+
|
103 |
+
|
104 |
+
|
105 |
+
def getUserSessionIdByUserEmail(id: int) -> user_login:
    """Return the stored session id for the user with primary key ``id``.

    Despite the name, the argument is the numeric user id, not an email;
    the parameter name also shadows the ``id`` builtin — both kept for
    interface compatibility. Returns None when the user or their login row
    is missing (a missing row makes one_or_none()[0] raise, which triggers
    the fallback just as in the original).

    Fixes from the original: removed a leftover debug ``print`` and the
    commits issued after purely read-only queries.
    """
    for engine_factory in (cf.get_db_engine, cf.get_db_engine1):
        try:
            engine = engine_factory()
            Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
            with Session() as session:
                email = session.query(users.email).filter(users.id == id).one_or_none()[0]
                user_record = session.query(user_login.user_session_id).filter(
                    user_login.user_email == email
                ).one_or_none()[0]
                return user_record if user_record else None
        except Exception:
            # Any failure on this engine: retry once on the other one.
            continue
    return None
|
repository/UserRepository.py
ADDED
@@ -0,0 +1,357 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Repository layer for the User table: token/email lookups and credential updates.
from sqlalchemy.orm import sessionmaker
from models import Database_Entity
from repository import ConfigDatabase as cf  # provides primary/fallback DB engines
import pytz , datetime
from datetime import timedelta
# Shorthand alias for the ORM-mapped User entity used throughout this module.
user = Database_Entity.User
|
7 |
+
|
8 |
+
def getUserIdByAccessToken(token: str) -> int:
    """Return the (id,) row of the user owning *token*, or None if no match.

    Tries the primary DB engine first; on any failure (connection or
    query error) retries once against the fallback engine.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user.id).filter(user.access_token == token).one_or_none()
    except Exception:  # fix: was a bare `except:` (also caught SystemExit)
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user.id).filter(user.access_token == token).one_or_none()
|
23 |
+
|
24 |
+
def getUserIdByAccessTokenAndUserId(token: str, user_id: int) -> int:
    """Return the (id,) row matching both *token* and *user_id*, else None.

    Used to verify that an access token actually belongs to the claimed
    user.  Falls back to the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user.id).filter(
                user.access_token == token, user.id == user_id).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user.id).filter(
                user.access_token == token, user.id == user_id).one_or_none()
|
39 |
+
|
40 |
+
def getUserByEmail(email: str) -> user:
    """Return the full User row for *email*, or None if no such user.

    Falls back to the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user).filter(user.email == email).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user).filter(user.email == email).one_or_none()
|
55 |
+
|
56 |
+
def getUserIdByEmail(email: str) -> user.id:
    """Return the scalar id of the user with *email*, or None when absent.

    Fix: the original did ``one_or_none()[0]`` which raised TypeError for
    an unknown email; now returns None instead.  Falls back to the
    secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.id).filter(user.email == email).one_or_none()
            return row[0] if row else None
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.id).filter(user.email == email).one_or_none()
            return row[0] if row else None
|
71 |
+
|
72 |
+
def getUserById(user_id: str) -> user:
    """Return the full User row with primary key *user_id*, or None.

    Falls back to the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user).filter(user.id == user_id).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user).filter(user.id == user_id).one_or_none()
|
87 |
+
|
88 |
+
def getRefreshTokenUserByAccessToken(token: str) -> user.refresh_token:
    """Return the (refresh_token,) row for the user owning *token*, or None.

    Falls back to the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user.refresh_token).filter(
                user.access_token == token).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user.refresh_token).filter(
                user.access_token == token).one_or_none()
|
103 |
+
|
104 |
+
|
105 |
+
def getUserIdByRefreshToken(refreshToken: str) -> user.refresh_token:
    """Return the scalar id of the user holding *refreshToken*, or None.

    Fix: the original did ``one_or_none()[0]`` which raised TypeError for
    an unknown token; now returns None instead.  Falls back to the
    secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.id).filter(
                user.refresh_token == refreshToken).one_or_none()
            return row[0] if row else None
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.id).filter(
                user.refresh_token == refreshToken).one_or_none()
            return row[0] if row else None
|
120 |
+
|
121 |
+
def getRefreshTokenUserById(user_id: str) -> user.refresh_token:
    """Return the scalar refresh_token of user *user_id*, or None.

    Fix: the original did ``one_or_none()[0]`` which raised TypeError for
    an unknown id; now returns None instead.  Falls back to the secondary
    DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.refresh_token).filter(user.id == user_id).one_or_none()
            return row[0] if row else None
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.refresh_token).filter(user.id == user_id).one_or_none()
            return row[0] if row else None
|
136 |
+
|
137 |
+
def getEmailUser(email: str) -> user.email:
    """Return the (email,) row if a user with *email* exists, else None.

    Effectively an existence check for an email.  Falls back to the
    secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user.email).filter(user.email == email).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user.email).filter(user.email == email).one_or_none()
|
152 |
+
|
153 |
+
def getEmailUserById(user_id: int) -> user.email:
    """Return the scalar email of user *user_id*, or None when absent.

    Fix: the original did ``one_or_none()[0]`` which raised TypeError for
    an unknown id; now returns None instead.  Falls back to the secondary
    DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.email).filter(user.id == user_id).one_or_none()
            return row[0] if row else None
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            row = session.query(user.email).filter(user.id == user_id).one_or_none()
            return row[0] if row else None
|
168 |
+
|
169 |
+
def getEmailUserByIdFix(user_id: int) -> user.email:
    """Return the (email,) row of user *user_id*, or None when absent.

    Variant of getEmailUserById that returns the row tuple rather than
    the unpacked scalar.  Falls back to the secondary DB engine on any
    failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user.email).filter(user.id == user_id).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user.email).filter(user.id == user_id).one_or_none()
|
184 |
+
|
185 |
+
|
186 |
+
|
187 |
+
def getEmailUserByAccessToken(token: str) -> user.email:
    """Return the (email,) row of the user owning *token*, or None.

    Falls back to the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            return session.query(user.email).filter(user.access_token == token).one_or_none()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            return session.query(user.email).filter(user.access_token == token).one_or_none()
|
202 |
+
|
203 |
+
|
204 |
+
def addUser(email: str, access_token: str, refresh_token: str, expires_at: datetime.datetime) -> None:
    """Insert a new User row with the given OAuth credentials.

    Falls back to the secondary DB engine on any failure.
    NOTE(review): if the insert itself fails (e.g. duplicate email), the
    fallback re-attempts the same insert on the other engine — this
    mirrors the original behaviour but can double-insert when both
    engines point at the same database; confirm intent.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(bind=engine)
        with Session() as session:  # context manager closes the session
            session.add(Database_Entity.User(
                email=email,
                access_token=access_token,
                refresh_token=refresh_token,
                expires_at=expires_at,
            ))
            session.commit()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(bind=engine)
        with Session() as session:
            session.add(Database_Entity.User(
                email=email,
                access_token=access_token,
                refresh_token=refresh_token,
                expires_at=expires_at,
            ))
            session.commit()
|
231 |
+
|
232 |
+
|
233 |
+
def updateUserLogin(email: str, access_token: str, refresh_token: str, expires_at: datetime.datetime) -> bool:
    """Update the stored credentials of the user with *email*.

    Returns True when a user row was found and updated, False otherwise.
    Falls back to the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            user_update = session.query(user).filter(user.email == email).one_or_none()
            if user_update is None:
                return False
            # Dropped the redundant `user_update.email = email` self-assignment.
            user_update.access_token = access_token
            user_update.refresh_token = refresh_token
            user_update.expires_at = expires_at
            session.commit()
            return True
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update = session.query(user).filter(user.email == email).one_or_none()
            if user_update is None:
                return False
            user_update.access_token = access_token
            user_update.refresh_token = refresh_token
            user_update.expires_at = expires_at
            session.commit()
            return True
|
266 |
+
|
267 |
+
|
268 |
+
def updateAccessToken(user_id: int, access_token: str, expires_at: datetime.datetime) -> None:
    """Set a new access token and expiry for user *user_id*.

    Silently does nothing when the user does not exist (matches the
    original behaviour).  Falls back to the secondary DB engine on any
    failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            user_update = session.query(user).filter(user.id == user_id).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.expires_at = expires_at
                session.commit()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update = session.query(user).filter(user.id == user_id).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.expires_at = expires_at
                session.commit()
|
287 |
+
|
288 |
+
def updateAccessTokenById(id: int, access_token: str, expires_at: datetime.datetime) -> None:
    """Set a new access token and expiry for user *id*.

    Duplicate of updateAccessToken kept for interface compatibility.
    Silently does nothing when the user does not exist.  Falls back to
    the secondary DB engine on any failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            user_update = session.query(user).filter(user.id == id).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.expires_at = expires_at
                session.commit()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update = session.query(user).filter(user.id == id).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.expires_at = expires_at
                session.commit()
|
309 |
+
|
310 |
+
|
311 |
+
def UpdateAccessTokenRefreshToken(email: str, access_token: str, refresh_token: str, expires_at: datetime.datetime) -> None:
    """Replace both tokens and the expiry for the user with *email*.

    Silently does nothing when the user does not exist (matches the
    original behaviour).  Falls back to the secondary DB engine on any
    failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            user_update = session.query(user).filter(user.email == email).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.refresh_token = refresh_token
                user_update.expires_at = expires_at
                session.commit()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update = session.query(user).filter(user.email == email).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.refresh_token = refresh_token
                user_update.expires_at = expires_at
                session.commit()
|
334 |
+
|
335 |
+
def UpdateAccessTokenRefreshTokenById(user_id: int, access_token: str, refresh_token: str, expires_at: datetime.datetime) -> None:
    """Replace both tokens and the expiry for the user with id *user_id*.

    Silently does nothing when the user does not exist (matches the
    original behaviour).  Falls back to the secondary DB engine on any
    failure.
    """
    try:
        engine = cf.get_db_engine()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:  # context manager closes the session
            user_update = session.query(user).filter(user.id == user_id).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.refresh_token = refresh_token
                user_update.expires_at = expires_at
                session.commit()
    except Exception:  # fix: was a bare `except:`
        engine = cf.get_db_engine1()
        Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        with Session() as session:
            user_update = session.query(user).filter(user.id == user_id).one_or_none()
            if user_update:
                user_update.access_token = access_token
                user_update.refresh_token = refresh_token
                user_update.expires_at = expires_at
                session.commit()
|
repository/__pycache__/ChatHistoryRepository.cpython-310.pyc
ADDED
Binary file (6.44 kB). View file
|
|
repository/__pycache__/ChatHistoryRepository.cpython-311.pyc
ADDED
Binary file (19.8 kB). View file
|
|
repository/__pycache__/ConfigDatabase.cpython-310.pyc
ADDED
Binary file (1.03 kB). View file
|
|
repository/__pycache__/ConfigDatabase.cpython-311.pyc
ADDED
Binary file (1.78 kB). View file
|
|
repository/__pycache__/DetailChatRepository.cpython-310.pyc
ADDED
Binary file (3.5 kB). View file
|
|
repository/__pycache__/DetailChatRepository.cpython-311.pyc
ADDED
Binary file (8.13 kB). View file
|
|