import os

from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

from api.embedding_models.embedding_config import EmbeddingConfig

# ROOT_PATH must be set before this module is imported; a missing
# variable raises KeyError here, at import time.
root_path = os.environ["ROOT_PATH"]
MODEL_DIRECTORY = os.path.join(root_path, "model")


def EmbeddingModel():
    # Walk up from this file to the parent api/ package directory.
    current_file_path = os.path.abspath(__file__)
    embedding_model_dir = os.path.dirname(current_file_path)
    api_dir = os.path.dirname(embedding_model_dir)
    # Note: when ROOT_PATH is absolute, MODEL_DIRECTORY is absolute too,
    # so os.path.join discards api_dir and model_dir == MODEL_DIRECTORY.
    model_dir = os.path.join(api_dir, MODEL_DIRECTORY)
    # Register BAAI/bge-m3 as the process-wide embedding model; the
    # weights are downloaded to (or loaded from) the local cache folder.
    Settings.embed_model = HuggingFaceEmbedding(
        model_name="BAAI/bge-m3",
        cache_folder=model_dir,
    )
    return EmbeddingConfig()
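

# A minimal usage sketch, not part of the original file. Assumptions:
# ROOT_PATH is exported before this module is imported (it is read at
# import time above), and EmbeddingConfig carries the app's embedding
# settings.
if __name__ == "__main__":
    config = EmbeddingModel()
    # Settings.embed_model is now BAAI/bge-m3 (cached under ROOT_PATH/model),
    # so any llama_index index built from here on embeds with it.
    print(type(config).__name__, MODEL_DIRECTORY)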