Spaces: Running
seanpedrickcase committed
Commit 49e0db8
1 Parent(s): 08eb30d
Added and replaced relevant files to download in download_model.py to allow for app use on AWS
Browse files
- Dockerfile +2 -2
- download_model.py +10 -3
Dockerfile
CHANGED
@@ -2,7 +2,7 @@
 FROM public.ecr.aws/docker/library/python:3.11.9-slim-bookworm AS builder
 
 # Install Lambda web adapter in case you want to run with with an AWS Lamba function URL (not essential if not using Lambda)
-#COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.
+#COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.4 /lambda-adapter /opt/extensions/lambda-adapter
 
 # Install system dependencies
 RUN apt-get update && rm -rf /var/lib/apt/lists/*
@@ -15,7 +15,7 @@ WORKDIR /src
 # Copy requirements file and install dependencies. Sentence transformers and Bertopic are installed without dependencies so that torch is not reinstalled.
 COPY requirements_aws.txt .
 
-RUN pip install torch==2.
+RUN pip install torch==2.5.1+cpu --target=/install --index-url https://download.pytorch.org/whl/cpu \
 && pip install --no-cache-dir --target=/install sentence-transformers==3.2.0 --no-deps \
 && pip install --no-cache-dir --target=/install bertopic==0.16.4 --no-deps \
 && pip install --no-cache-dir --target=/install -r requirements_aws.txt \
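The torch pin above points at the CPU-only wheel index, so the builder stage never pulls the much larger CUDA build, and the --no-deps installs keep sentence-transformers and bertopic from reinstalling torch. Not part of the commit, but a minimal sanity check for the resulting image, assuming the /install target directory ends up on the runtime image's PYTHONPATH as the builder pattern implies:

import torch

# The CPU-only wheel should report a "+cpu" build and no CUDA support.
print(torch.__version__)           # expected: "2.5.1+cpu"
print(torch.cuda.is_available())   # expected: False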
download_model.py
CHANGED
@@ -1,15 +1,22 @@
 from huggingface_hub import hf_hub_download
 
 # Define the repository and files to download
-repo_id = "mixedbread-ai/mxbai-embed-xsmall-v1" #"sentence-transformers/all-MiniLM-L6-v2"
+repo_id = "mixedbread-ai/mxbai-embed-xsmall-v1" #"sentence-transformers/all-MiniLM-L6-v2"
 files_to_download = [
     "config.json",
-    "
+    "config_sentence_transformers.json",
+    "model.safetensors",
+    "tokenizer.json",
+    "special_tokens_map.json",
+    "angle_config.json",
+    "modules.json",
     "tokenizer_config.json",
     "vocab.txt"
 ]
 
+#"pytorch_model.bin",
+
 # Download each file and save it to the /model/bge directory
 for file_name in files_to_download:
     print("Checking for file", file_name)
-    hf_hub_download(repo_id=repo_id, filename=file_name, local_dir="/model/embed")
+    hf_hub_download(repo_id=repo_id, filename=file_name, local_dir="/model/embed")
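The added files (config_sentence_transformers.json, modules.json, model.safetensors and the tokenizer files) are what sentence-transformers needs to load the model entirely from disk. How the app consumes them is not shown in this commit; a minimal sketch, assuming the app loads the embedding model from the same /model/embed directory that local_dir points at above:

from sentence_transformers import SentenceTransformer

# Load the embedding model from the files fetched by download_model.py,
# so the container does not reach out to the Hugging Face Hub at start-up.
model = SentenceTransformer("/model/embed")

# Quick smoke test: encode one sentence and check the embedding shape.
embeddings = model.encode(["A quick smoke test sentence."])
print(embeddings.shape)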