Commit aa92fe2
Parent(s): 817c2b8

Restore local Mistral parameters

Files changed:
- app.py +5 -2
- requirements.txt +4 -0
app.py CHANGED

@@ -10,6 +10,7 @@ from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings,
 from llama_index.core.chat_engine.types import ChatMode
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 from llama_index.llms.openai_like import OpenAILike
+#from llama_index.llms.mistralai import MistralAI
 
 PAGE_TITLE="Votre expert Scrum"
 CHAT_TITLE="Posez-moi une question sur le guide Scrum 2020 (anglais ou français)"
@@ -18,12 +19,14 @@ SYSTEM_PROMPT="Use the context information provided to assist the user. Mention
 EMBEDDING_MODEL="BAAI/bge-small-en-v1.5"
 #EMBEDDING_MODEL="BAAI/bge-m3" # Multilingual large model
 #LLM_MODEL="DeepSeek-R1-Distill-Llama-70B" # Available models on : https://chatapi.akash.network/documentation#models
-LLM_MODEL="DeepSeek-R1-Distill-Qwen-32B"
+#LLM_MODEL="DeepSeek-R1-Distill-Qwen-32B"
+LLM_MODEL="mistralai/Mistral-7B-Instruct-v0.3"
 NB_DOC_CHUNKS_TO_SEND=5
 MAX_NB_TOKENS_IN_RESPONSE=1500
 TEMPERATURE=0.5 # The closer to 1, the less deterministic and the more creative
 
-API_BASE_URL="https://chatapi.akash.network/api/v1" # Changing may require to adapt the custom_llm initialization
+#API_BASE_URL="https://chatapi.akash.network/api/v1" # Changing may require to adapt the custom_llm initialization
+API_BASE_URL="http://192.168.18.32:3128" # Local Mistral
 
 # Ajuster le chemin de torch.classes pour éviter le conflit
 torch.classes.__path__ = []
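This hunk switches the app from the Akash-hosted DeepSeek model to a Mistral-7B-Instruct-v0.3 model served at a local address. The commit does not show how these constants reach the LLM client (the old comment only hints at a custom_llm initialization), so the following is a minimal sketch, assuming the existing OpenAILike class is reused and that the local endpoint speaks the OpenAI chat-completions protocol; the api_key value and is_chat_model flag are illustrative assumptions:

# Sketch only: wiring the new constants into an OpenAILike client pointed at the
# local Mistral endpoint. The actual custom_llm initialization is not part of this diff.
from llama_index.llms.openai_like import OpenAILike

LLM_MODEL = "mistralai/Mistral-7B-Instruct-v0.3"
API_BASE_URL = "http://192.168.18.32:3128"   # Local Mistral
MAX_NB_TOKENS_IN_RESPONSE = 1500
TEMPERATURE = 0.5

custom_llm = OpenAILike(
    model=LLM_MODEL,                     # model name exposed by the local server
    api_base=API_BASE_URL,               # assumed OpenAI-compatible endpoint
    api_key="not-needed",                # placeholder: a local server may not check the key
    temperature=TEMPERATURE,
    max_tokens=MAX_NB_TOKENS_IN_RESPONSE,
    is_chat_model=True,                  # treat the endpoint as a chat-completions API
)

Keeping the OpenAILike wrapper means only the two constants above change when moving between the hosted Akash endpoint and the local server, which matches the way the old API_BASE_URL line is commented out rather than deleted.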
requirements.txt CHANGED

@@ -17,6 +17,7 @@ Deprecated==1.2.18
 dirtyjson==1.0.8
 distro==1.9.0
 embeddings==0.0.8
+eval_type_backport==0.2.2
 filelock==3.17.0
 filetype==1.2.0
 frozenlist==1.5.0
@@ -32,10 +33,12 @@ idna==3.10
 Jinja2==3.1.5
 jiter==0.8.2
 joblib==1.4.2
+jsonpath-python==1.0.6
 jsonschema==4.23.0
 jsonschema-specifications==2024.10.1
 llama-index-core==0.12.16.post1
 llama-index-embeddings-huggingface==0.5.1
+llama-index-llms-mistralai==0.3.3
 llama-index-llms-openai==0.3.18
 llama-index-llms-openai-like==0.3.3
 llama-index-readers-file==0.4.4
@@ -43,6 +46,7 @@ markdown-it-py==3.0.0
 MarkupSafe==3.0.2
 marshmallow==3.26.1
 mdurl==0.1.2
+mistralai==1.5.0
 mpmath==1.3.0
 multidict==6.1.0
 mypy-extensions==1.0.0