Spaces:
Sleeping
Sleeping
Sandaruth
committed on
Commit
•
3e7ea7a
1
Parent(s):
9a9ff62
temp
Browse files- .gitignore +6 -0
- color.py +14 -0
- model.py +89 -0
- requirements.txt +99 -0
.gitignore
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# environment variables
|
2 |
+
.env
|
3 |
+
|
4 |
+
|
5 |
+
# test files
|
6 |
+
/kk.ipynb
|
color.py
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
def print_colorful_msg(msg, color='white'):
    """Print *msg* to stdout wrapped in ANSI terminal color escape codes.

    Parameters
    ----------
    msg : str
        Text to print.
    color : str, optional
        One of 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan',
        'white', 'bold', 'underline'. An unrecognized name falls back
        to white instead of raising KeyError.
    """
    colors = {
        'red': '\033[91m',
        'green': '\033[92m',
        'yellow': '\033[93m',
        'blue': '\033[94m',
        'magenta': '\033[95m',
        'cyan': '\033[96m',
        'white': '\033[97m',
        'bold': '\033[1m',
        'underline': '\033[4m',
        'end': '\033[0m'
    }
    # .get() makes the function total: a typo in `color` degrades to the
    # documented default ('white') rather than crashing the caller.
    print(f"{colors.get(color, colors['white'])}{msg}{colors['end']}")
|
model.py
ADDED
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os

from dotenv import load_dotenv

# Load environment variables from .env file (expects OPENAI_API_KEY).
load_dotenv()

# Access the value of OPENAI_API_KEY.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Fail fast with an actionable message: os.getenv returns None when the
# variable is absent, and os.environ[...] = None would otherwise raise an
# opaque TypeError further down.
if OPENAI_API_KEY is None:
    raise RuntimeError("OPENAI_API_KEY is not set; add it to your .env file")

os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY

from langchain_openai import ChatOpenAI

# temperature=0 -> deterministic answers, appropriate for a QA assistant.
llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
## Create embeddings and splitter

from langchain.embeddings import HuggingFaceBgeEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter

# Create Embeddings
model_name = "BAAI/bge-large-en"

embedding = HuggingFaceBgeEmbeddings(
    model_name=model_name,
    # NOTE(review): hard-coded GPU device; this fails on CPU-only hosts —
    # consider making the device configurable.
    model_kwargs={'device': 'cuda'},
    # Normalized embeddings so inner-product search behaves like cosine.
    encode_kwargs={'normalize_embeddings': True},
)

# Create Splitter
splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=100,
)

from langchain_community.vectorstores import FAISS

# Path to the prebuilt FAISS index (renamed from the misspelled
# `persitsdirectory`; local name only, no external callers).
persist_directory = "/NLP/ATrad/ATrad/db/faiss_Test02_500_C_BGE_large"

vectorstore = FAISS.load_local(persist_directory, embedding)

# Define a custom prompt for User manual
from langchain.prompts import PromptTemplate

qa_template = ("""
You are the AI assistant of the IronOne Technologies which provide services for companies members and novice users with learning with ATrad Aplication .
You have provided context information below related to learning material.

Context: {context}

Given this information, please answer the question with the latest information.
If you dont know the answer say you dont know, dont try to makeup answers.
if context is not enough to answer the question, ask for more information.
if context is not related to the question, say I dont know.

each answer should start with code word ATrad Ai(QA):

Question: {question}

answer: let me think about it...""")

qa_template2 = ("""
Welcome to IronOne Technologies' AI Assistant, designed to assist you in learning with the ATrad Application.

Context: {context}

As your AI assistant, I'm here to help you navigate through learning materials and provide guidance.
Please provide me with any questions or concerns you have regarding the ATrad Application.
If you're unsure about something or need more information, feel free to ask.

Question: {question}

ATrad Ai(QA): Let me think about it...""")


# NOTE(review): qa_template is defined but unused; qa_template2 is the
# active prompt. Kept for reference/experimentation.
QA_PROMPT = PromptTemplate(input_variables=["context", "question"], template=qa_template2)


# Chain for Web
from langchain.chains import RetrievalQA

Web_qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    # Retrieve the 4 most similar chunks for each question.
    retriever=vectorstore.as_retriever(search_kwargs={"k": 4}),
    # Surface the source chunks alongside the answer for traceability.
    return_source_documents=True,
    input_key="question",
    chain_type_kwargs={"prompt": QA_PROMPT},
)
|
89 |
+
|
requirements.txt
ADDED
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
aiohttp==3.9.3
|
2 |
+
aiosignal==1.3.1
|
3 |
+
annotated-types==0.6.0
|
4 |
+
anyio==4.2.0
|
5 |
+
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1698341106958/work
|
6 |
+
async-timeout==4.0.3
|
7 |
+
attrs==23.2.0
|
8 |
+
certifi==2024.2.2
|
9 |
+
charset-normalizer==3.3.2
|
10 |
+
click==8.1.7
|
11 |
+
colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1666700638685/work
|
12 |
+
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1704278392174/work
|
13 |
+
dataclasses-json==0.6.4
|
14 |
+
debugpy @ file:///C:/b/abs_c0y1fjipt2/croot/debugpy_1690906864587/work
|
15 |
+
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1641555617451/work
|
16 |
+
distro==1.9.0
|
17 |
+
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1704921103267/work
|
18 |
+
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1698579936712/work
|
19 |
+
faiss-cpu==1.7.4
|
20 |
+
filelock==3.13.1
|
21 |
+
frozenlist==1.4.1
|
22 |
+
fsspec==2024.2.0
|
23 |
+
greenlet==3.0.3
|
24 |
+
h11==0.14.0
|
25 |
+
httpcore==1.0.3
|
26 |
+
httpx==0.26.0
|
27 |
+
huggingface-hub==0.20.3
|
28 |
+
idna==3.6
|
29 |
+
importlib-metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1703269254275/work
|
30 |
+
ipykernel @ file:///D:/bld/ipykernel_1707326433944/work
|
31 |
+
ipython @ file:///D:/bld/ipython_1706795789991/work
|
32 |
+
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1696326070614/work
|
33 |
+
Jinja2==3.1.3
|
34 |
+
joblib==1.3.2
|
35 |
+
jsonpatch==1.33
|
36 |
+
jsonpointer==2.4
|
37 |
+
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1699283905679/work
|
38 |
+
jupyter_core @ file:///D:/bld/jupyter_core_1704727156030/work
|
39 |
+
langchain==0.1.7
|
40 |
+
langchain-community==0.0.20
|
41 |
+
langchain-core==0.1.23
|
42 |
+
langchain-openai==0.0.6
|
43 |
+
langsmith==0.0.87
|
44 |
+
MarkupSafe==2.1.5
|
45 |
+
marshmallow==3.20.2
|
46 |
+
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1660814786464/work
|
47 |
+
mpmath==1.3.0
|
48 |
+
multidict==6.0.5
|
49 |
+
mypy-extensions==1.0.0
|
50 |
+
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1705850609492/work
|
51 |
+
networkx==3.2.1
|
52 |
+
nltk==3.8.1
|
53 |
+
numpy==1.26.4
|
54 |
+
openai==1.12.0
|
55 |
+
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1696202382185/work
|
56 |
+
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1638334955874/work
|
57 |
+
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1602536217715/work
|
58 |
+
pillow==10.2.0
|
59 |
+
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/platformdirs_1706713388748/work
|
60 |
+
prompt-toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1702399386289/work
|
61 |
+
psutil @ file:///C:/Windows/Temp/abs_b2c2fd7f-9fd5-4756-95ea-8aed74d0039flsd9qufz/croots/recipe/psutil_1656431277748/work
|
62 |
+
pure-eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1642875951954/work
|
63 |
+
pydantic==2.6.1
|
64 |
+
pydantic_core==2.16.2
|
65 |
+
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1700607939962/work
|
66 |
+
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1626286286081/work
|
67 |
+
python-dotenv==1.0.1
|
68 |
+
pywin32==305.1
|
69 |
+
PyYAML==6.0.1
|
70 |
+
pyzmq @ file:///C:/b/abs_89aq69t0up/croot/pyzmq_1705605705281/work
|
71 |
+
regex==2023.12.25
|
72 |
+
requests==2.31.0
|
73 |
+
safetensors==0.4.2
|
74 |
+
scikit-learn==1.4.1.post1
|
75 |
+
scipy==1.12.0
|
76 |
+
sentence-transformers==2.3.1
|
77 |
+
sentencepiece==0.1.99
|
78 |
+
six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
|
79 |
+
sniffio==1.3.0
|
80 |
+
SQLAlchemy==2.0.27
|
81 |
+
stack-data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1669632077133/work
|
82 |
+
sympy==1.12
|
83 |
+
tenacity==8.2.3
|
84 |
+
threadpoolctl==3.3.0
|
85 |
+
tiktoken==0.6.0
|
86 |
+
tokenizers==0.15.2
|
87 |
+
torch==2.2.0+cu121
|
88 |
+
torchaudio==2.2.0+cu121
|
89 |
+
torchvision==0.17.0+cu121
|
90 |
+
tornado @ file:///D:/bld/tornado_1656937966227/work
|
91 |
+
tqdm==4.66.2
|
92 |
+
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1704212992681/work
|
93 |
+
transformers==4.37.2
|
94 |
+
typing-inspect==0.9.0
|
95 |
+
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/typing_extensions_1702176139754/work
|
96 |
+
urllib3==2.2.1
|
97 |
+
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1704731205417/work
|
98 |
+
yarl==1.9.4
|
99 |
+
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1695255097490/work
|