# Use an official PyTorch image with CUDA support as the base image
FROM pytorch/pytorch:1.12.1-cuda11.3-cudnn8-runtime

# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user

# Switch to the "user" user
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Set the working directory to the user's home directory
WORKDIR $HOME/app

# Clone your repository or add your code to the container
RUN git clone -b dev https://github.com/camenduru/DiffBIR $HOME/app

# Install Python dependencies
RUN pip3 install -q einops pytorch_lightning gradio omegaconf xformers==0.0.20 transformers lpips opencv-python

# Install open_clip from GitHub
RUN pip3 install -q git+https://github.com/mlfoundations/open_clip@v2.20.0

# Switch to root to install system packages and build aria2 1.36.0 from source
USER root
RUN apt-get update && apt-get install -y sudo wget libgl1-mesa-glx && \
    wget https://github.com/aria2/aria2/releases/download/release-1.36.0/aria2-1.36.0.tar.gz && \
    tar -xzvf aria2-1.36.0.tar.gz && \
    cd aria2-1.36.0 && \
    ./configure && \
    make && \
    make install && \
    cd .. && \
    rm -rf aria2-1.36.0 && \
    rm aria2-1.36.0.tar.gz

# Switch back to the "user" user
USER user

# Download checkpoint files using aria2
RUN aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/lxq007/DiffBIR/resolve/main/general_full_v1.ckpt -d $HOME/app/models -o general_full_v1.ckpt
RUN aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/lxq007/DiffBIR/resolve/main/general_swinir_v1.ckpt -d $HOME/app/models -o general_swinir_v1.ckpt

# Expose any necessary ports (if your application requires it)
# EXPOSE 80

# List the application files (debugging aid)
RUN find $HOME/app

# Set environment variables to select the GPU device
ENV CUDA_DEVICE_ORDER=PCI_BUS_ID
ENV CUDA_VISIBLE_DEVICES=0

# Define the command to run the application
CMD ["python", "gradio_diffbir.py", "--ckpt", "/home/user/app/models/general_full_v1.ckpt", "--config", "/home/user/app/configs/model/cldm.yaml", "--reload_swinir", "--swinir_ckpt", "/home/user/app/models/general_swinir_v1.ckpt"]
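
# Example build and run commands (a sketch, not prescriptive): the image tag "diffbir",
# the host port mapping, and port 7860 (Gradio's default) are assumptions; gradio_diffbir.py
# may bind a different port. "--gpus all" requires the NVIDIA Container Toolkit on the host.
#   docker build -t diffbir .
#   docker run --gpus all -p 7860:7860 diffbir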