@@ -81,17 +81,23 @@ RUN sed -i "s/httpredir.debian.org/debian.uchicago.edu/" /etc/apt/sources.list &
 # b/128333086: Set PROJ_DATA to point to the proj4 cartographic library.
 ENV PROJ_DATA=/opt/conda/share/proj
 
+# Install micromamba, set up channels, and replace conda with micromamba
+ENV MAMBA_ROOT_PREFIX=/opt/conda
+RUN curl -L "https://micro.mamba.pm/install.sh" -o /tmp/micromamba-install.sh \
+    && bash /tmp/micromamba-install.sh \
+    && rm /tmp/micromamba-install.sh \
+    && mv ~/.local/bin/micromamba /usr/bin/micromamba \
+    && (!(which conda) || cp /usr/bin/micromamba $(which conda)) \
+    && micromamba config append channels nvidia \
+    && micromamba config append channels rapidsai \
+    && micromamba config append channels conda-forge \
+    && micromamba config set channel_priority flexible \
+    && python -m nb_conda_kernels.install --disable
+
 # Install conda packages not available on pip.
 # When using pip in a conda environment, conda commands should be run first and then
 # the remaining pip commands: https://www.anaconda.com/using-pip-in-a-conda-environment/
-RUN conda config --add channels nvidia && \
-    conda config --add channels rapidsai && \
-    conda config --set solver libmamba && \
-    # b/299991198: remove curl/libcurl install once DLVM base image includes version >= 7.86
-    conda install -c conda-forge mamba curl libcurl && \
-    # Base image channel order: conda-forge (highest priority), defaults.
-    # End state: rapidsai (highest priority), nvidia, conda-forge, defaults.
-    mamba install -y mkl cartopy imagemagick pyproj "shapely<2" && \
+RUN micromamba install -y mkl cartopy imagemagick pyproj "shapely<2" && \
     rm -rf /opt/conda/lib/python3.10/site-packages/pyproj/proj_dir/ && \
     /tmp/clean-layer.sh
 
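A minimal sketch of how the micromamba bootstrap above can be sanity-checked once the image is built; the tag kaggle/python-build is only an illustrative placeholder, not something this Dockerfile defines:

    # Channels appended above should report as nvidia, rapidsai, conda-forge with flexible priority.
    docker run --rm kaggle/python-build micromamba config list
    # After the cp step, any pre-existing `conda` entrypoint is the micromamba binary.
    docker run --rm kaggle/python-build bash -c 'which conda && conda --version'
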
@@ -100,8 +106,7 @@ RUN conda config --add channels nvidia && \
 # b/341938540: uninstall grpc-cpp to allow >=v24.4 cudf and cuml to be installed.
 {{ if eq .Accelerator "gpu" }}
 RUN pip uninstall -y pyarrow && \
-    mamba remove -y --force grpc-cpp && \
-    mamba install -y -c conda-forge spacy cudf>=24.4 cuml>=24.4 cupy cuda-version=$CUDA_MAJOR_VERSION.$CUDA_MINOR_VERSION && \
+    micromamba install -vvvy spacy "cudf>=24.4" "cuml>=24.4" cupy cuda-version=$CUDA_MAJOR_VERSION.$CUDA_MINOR_VERSION && \
     /tmp/clean-layer.sh
 {{ else }}
 RUN pip install spacy && \
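For reference, with the CUDA 12.3 toolchain mentioned in the comment further below, the templated pin in the GPU branch above resolves to roughly the following (the concrete build-arg values are an assumption drawn from that comment):

    # Assumes CUDA_MAJOR_VERSION=12 and CUDA_MINOR_VERSION=3.
    micromamba install -vvvy spacy "cudf>=24.4" "cuml>=24.4" cupy cuda-version=12.3
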
@@ -114,7 +119,7 @@ RUN pip install spacy && \
 COPY --from=torch_whl /tmp/whl/*.whl /tmp/torch/
 # b/356397043: We are currently using cuda 12.3,
 # but magma-cuda121 is the latest compatible version
-RUN mamba install -y -c pytorch magma-cuda121 && \
+RUN micromamba install -y -c pytorch magma-cuda121 && \
     pip install /tmp/torch/*.whl && \
     sudo apt -y install libsox-dev && \
     rm -rf /tmp/torch && \
@@ -507,7 +512,7 @@ RUN pip install wandb \
     pip install --no-dependencies fastai fastdownload && \
     # b/343971718: remove duplicate aiohttp installs, and reinstall it
    rm -rf /opt/conda/lib/python3.10/site-packages/aiohttp* && \
-    mamba install --force-reinstall -y aiohttp && \
+    micromamba install --force-reinstall -y aiohttp && \
     /tmp/clean-layer.sh
 
 # Download base easyocr models.
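A short, assumed verification for the aiohttp cleanup above (same placeholder image tag as before); it checks that only one copy of the package remains and that it still imports:

    docker run --rm kaggle/python-build bash -c 'ls -d /opt/conda/lib/python3.10/site-packages/aiohttp*'
    docker run --rm kaggle/python-build python -c 'import aiohttp; print(aiohttp.__version__)'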