add docker for mast3r demo (mostly taken from dust3r)
- README.md +26 -1
- docker/docker-compose-cpu.yml +16 -0
- docker/docker-compose-cuda.yml +23 -0
- docker/files/cpu.Dockerfile +39 -0
- docker/files/cuda.Dockerfile +29 -0
- docker/files/entrypoint.sh +8 -0
- docker/run.sh +68 -0

README.md CHANGED

@@ -133,7 +133,32 @@ see https://github.com/naver/dust3r?tab=readme-ov-file#interactive-demo for details
 
 ### Interactive demo with docker
 
-
+To run MASt3R using Docker, including with NVIDIA CUDA support, follow these instructions:
+
+1. **Install Docker**: If not already installed, download and install `docker` and `docker compose` from the [Docker website](https://www.docker.com/get-started).
+
+2. **Install the NVIDIA Container Toolkit**: For GPU support, install the NVIDIA Container Toolkit from the [NVIDIA website](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html).
+
+3. **Build the Docker image and run it**: `cd` into the `./docker` directory and run the following commands:
+
+```bash
+cd docker
+bash run.sh --with-cuda --model_name="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric"
+```
+
+Or, to run the demo without CUDA support, run the following command:
+
+```bash
+cd docker
+bash run.sh --model_name="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric"
+```
+
+By default, `demo.py` is launched with the option `--local_network`.
+Visit `http://localhost:7860/` to access the web UI (or replace `localhost` with the machine's name to access it from the network).
+
+`run.sh` will launch Docker Compose using either the [docker-compose-cuda.yml](docker/docker-compose-cuda.yml) or [docker-compose-cpu.yml](docker/docker-compose-cpu.yml) config file, then start the demo using [entrypoint.sh](docker/files/entrypoint.sh).
+
+___
 
 ![demo](assets/demo.jpg)
 
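For reference, `run.sh` (added below) boils down to exporting `MODEL` and invoking Docker Compose on one of the two config files, so the demo can also be started without the wrapper. A minimal sketch, assuming the chosen checkpoint is already present in `docker/files/checkpoints/`:

```bash
cd docker
# Equivalent of `bash run.sh --with-cuda --model_name=...`, minus the checkpoint download step.
export MODEL="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth"
docker compose -f docker-compose-cuda.yml up --build   # or docker-compose-cpu.yml for CPU
```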

docker/docker-compose-cpu.yml ADDED

@@ -0,0 +1,16 @@
+version: '3.8'
+services:
+  mast3r-demo:
+    build:
+      context: ./files
+      dockerfile: cpu.Dockerfile
+    ports:
+      - "7860:7860"
+    volumes:
+      - ./files/checkpoints:/mast3r/checkpoints
+    environment:
+      - DEVICE=cpu
+      - MODEL=${MODEL:-MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth}
+    cap_add:
+      - IPC_LOCK
+      - SYS_RESOURCE
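The service mounts `./files/checkpoints` from the host at `/mast3r/checkpoints` in the container, which is where `entrypoint.sh` expects the weights. If Compose is used directly, bypassing `run.sh`, the checkpoint has to be fetched into that folder by hand; a sketch mirroring the download step in `run.sh`:

```bash
# Run from the docker/ directory: put the weights where the volume mount expects them.
mkdir -p files/checkpoints
wget -P files/checkpoints \
  "https://download.europe.naverlabs.com/ComputerVision/MASt3R/MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth"
```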

docker/docker-compose-cuda.yml ADDED

@@ -0,0 +1,23 @@
+version: '3.8'
+services:
+  mast3r-demo:
+    build:
+      context: ./files
+      dockerfile: cuda.Dockerfile
+    ports:
+      - "7860:7860"
+    environment:
+      - DEVICE=cuda
+      - MODEL=${MODEL:-MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth}
+    volumes:
+      - ./files/checkpoints:/mast3r/checkpoints
+    cap_add:
+      - IPC_LOCK
+      - SYS_RESOURCE
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
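The `deploy` block reserves one NVIDIA GPU for the service, which only works if the NVIDIA Container Toolkit is installed and configured. A quick sanity check before building (the CUDA image tag here is only illustrative; any locally available CUDA base image works):

```bash
# Should print the host GPUs from inside a container; if this fails,
# the GPU reservation in docker-compose-cuda.yml will fail as well.
docker run --rm --gpus all nvidia/cuda:12.3.1-base-ubuntu22.04 nvidia-smi
```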

docker/files/cpu.Dockerfile ADDED

@@ -0,0 +1,39 @@
+FROM python:3.11-slim
+
+LABEL description="Docker container for MASt3R with dependencies installed. CPU VERSION"
+
+ENV DEVICE="cpu"
+ENV MODEL="MASt3R_ViTLarge_BaseDecoder_512_dpt.pth"
+ARG DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y \
+    git \
+    libgl1-mesa-glx \
+    libegl1-mesa \
+    libxrandr2 \
+    libxrandr2 \
+    libxss1 \
+    libxcursor1 \
+    libxcomposite1 \
+    libasound2 \
+    libxi6 \
+    libxtst6 \
+    libglib2.0-0 \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN git clone --recursive https://github.com/naver/mast3r /mast3r
+WORKDIR /mast3r/dust3r
+
+RUN pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cpu
+RUN pip install -r requirements.txt
+RUN pip install -r requirements_optional.txt
+RUN pip install opencv-python==4.8.0.74
+
+WORKDIR /mast3r
+RUN pip install -r requirements.txt
+
+COPY entrypoint.sh /entrypoint.sh
+RUN chmod +x /entrypoint.sh
+
+ENTRYPOINT ["/entrypoint.sh"]
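For debugging, the CPU image can also be built and run directly rather than through Compose. A sketch from the `docker/files/` directory, matching the port, volume and environment settings of `docker-compose-cpu.yml` (the tag `mast3r-demo:cpu` is just an illustrative name):

```bash
# Build the CPU image and run it with the same settings as docker-compose-cpu.yml.
docker build -t mast3r-demo:cpu -f cpu.Dockerfile .
docker run --rm -p 7860:7860 \
  -v "$(pwd)/checkpoints:/mast3r/checkpoints" \
  -e DEVICE=cpu \
  -e MODEL="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth" \
  mast3r-demo:cpu
```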

docker/files/cuda.Dockerfile ADDED

@@ -0,0 +1,29 @@
+FROM nvcr.io/nvidia/pytorch:24.01-py3
+
+LABEL description="Docker container for MASt3R with dependencies installed. CUDA VERSION"
+ENV DEVICE="cuda"
+ENV MODEL="MASt3R_ViTLarge_BaseDecoder_512_dpt.pth"
+ARG DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y \
+    git=1:2.34.1-1ubuntu1.10 \
+    libglib2.0-0=2.72.4-0ubuntu2.2 \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN git clone --recursive https://github.com/naver/mast3r /mast3r
+WORKDIR /mast3r/dust3r
+RUN pip install -r requirements.txt
+RUN pip install -r requirements_optional.txt
+RUN pip install opencv-python==4.8.0.74
+
+WORKDIR /mast3r/dust3r/croco/models/curope/
+RUN python setup.py build_ext --inplace
+
+WORKDIR /mast3r
+RUN pip install -r requirements.txt
+
+COPY entrypoint.sh /entrypoint.sh
+RUN chmod +x /entrypoint.sh
+
+ENTRYPOINT ["/entrypoint.sh"]
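Running the CUDA image directly works the same way, except the GPU has to be passed through explicitly (Compose handles this via the `deploy` block). A sketch, again with an illustrative tag:

```bash
# Build and run from docker/files/, passing all GPUs through to the container.
docker build -t mast3r-demo:cuda -f cuda.Dockerfile .
docker run --rm --gpus all -p 7860:7860 \
  -v "$(pwd)/checkpoints:/mast3r/checkpoints" \
  -e DEVICE=cuda \
  -e MODEL="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth" \
  mast3r-demo:cuda
```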

docker/files/entrypoint.sh ADDED

@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -eux
+
+DEVICE=${DEVICE:-cuda}
+MODEL=${MODEL:-MASt3R_ViTLarge_BaseDecoder_512_dpt.pth}
+
+exec python3 demo.py --weights "checkpoints/$MODEL" --device "$DEVICE" --local_network "$@"
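The fallbacks here (`cuda` and the `_512_dpt` checkpoint) only apply when `DEVICE` and `MODEL` are unset; in practice the compose files always inject both. To see the values the container will actually receive, the resolved configuration can be inspected from the `docker/` directory:

```bash
# Render the resolved service definition, including the DEVICE and MODEL
# environment values that entrypoint.sh will pick up.
MODEL="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth" \
  docker compose -f docker-compose-cpu.yml config
```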

docker/run.sh ADDED

@@ -0,0 +1,68 @@
+#!/bin/bash
+
+set -eux
+
+# Default model name
+model_name="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric.pth"
+
+check_docker() {
+    if ! command -v docker &>/dev/null; then
+        echo "Docker could not be found. Please install Docker and try again."
+        exit 1
+    fi
+}
+
+download_model_checkpoint() {
+    if [ -f "./files/checkpoints/${model_name}" ]; then
+        echo "Model checkpoint ${model_name} already exists. Skipping download."
+        return
+    fi
+    echo "Downloading model checkpoint ${model_name}..."
+    wget "https://download.europe.naverlabs.com/ComputerVision/MASt3R/${model_name}" -P ./files/checkpoints
+}
+
+set_dcomp() {
+    if command -v docker-compose &>/dev/null; then
+        dcomp="docker-compose"
+    elif command -v docker &>/dev/null && docker compose version &>/dev/null; then
+        dcomp="docker compose"
+    else
+        echo "Docker Compose could not be found. Please install Docker Compose and try again."
+        exit 1
+    fi
+}
+
+run_docker() {
+    export MODEL=${model_name}
+    if [ "$with_cuda" -eq 1 ]; then
+        $dcomp -f docker-compose-cuda.yml up --build
+    else
+        $dcomp -f docker-compose-cpu.yml up --build
+    fi
+}
+
+with_cuda=0
+for arg in "$@"; do
+    case $arg in
+    --with-cuda)
+        with_cuda=1
+        ;;
+    --model_name=*)
+        model_name="${arg#*=}.pth"
+        ;;
+    *)
+        echo "Unknown parameter passed: $arg"
+        exit 1
+        ;;
+    esac
+done
+
+
+main() {
+    check_docker
+    download_model_checkpoint
+    set_dcomp
+    run_docker
+}
+
+main
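One detail worth noting: the `--model_name` handler appends `.pth` itself, so the flag should be given the bare checkpoint name, as in the README examples:

```bash
# run.sh adds the ".pth" suffix, so pass the checkpoint name without it.
bash run.sh --with-cuda --model_name="MASt3R_ViTLarge_BaseDecoder_512_catmlpdpt_metric"
```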