
Commit 3b9d450

changed conda setup to uv
1 parent c6b01ba commit 3b9d450

File tree: 15 files changed, +4378 -203 lines


.github/workflows/backend_checks.yml

Lines changed: 14 additions & 13 deletions
@@ -64,35 +64,37 @@ jobs:
       - name: Check 1 - pytest runs without errors
         working-directory: docker
         run: |
-          docker compose run dats-backend-api /opt/envs/dats/bin/python -m pytest
+          docker compose run dats-backend-api /dats_code/.venv/bin/python -m pytest
       - name: Check 2 - Database migrates without errors database
         working-directory: docker
         run: |
-          docker compose run -e PYTHONPATH='/dats_code/src' dats-backend-api /opt/envs/dats/bin/python migration/run_migrations.py
+          docker compose run -e PYTHONPATH='/dats_code/src' dats-backend-api /dats_code/.venv/bin/python migration/run_migrations.py
       - name: Check 3 - Database schema is up-to-date after migration
         working-directory: docker
         run: |
-          docker compose run dats-backend-api /opt/envs/dats/bin/alembic check
+          docker compose run dats-backend-api /dats_code/.venv/bin/alembic check
       - name: Start Remaining Docker Containers
         working-directory: docker
         run: |
           COMPOSE_PROFILES="weaviate,background,backend" docker compose up --wait --quiet-pull
-      - name: Check 4 - Test End-2-End importer script
-        working-directory: tools/importer
+      - name: Check 4 - pyright runs without errors
+        working-directory: backend
+        run: |
+          uv sync --directory . --no-editable --no-install-project --no-install-workspace
+          source .venv/bin/activate
+          pyright
+      - name: Check 5 - Test End-2-End importer script
+        working-directory: backend
         env:
           TESTDATA_PASSWORD: ${{ secrets.TESTDATA_PASSWORD }}
         run: |
-          pip install -r requirements.txt
+          source .venv/bin/activate
+          cd ../tools/importer
+          uv pip install -r requirements.txt
           wget -q http://ltdata1.informatik.uni-hamburg.de/dwts/totalitarismo.zip
           unzip -q -P "$TESTDATA_PASSWORD" totalitarismo.zip
           python dats_importer.py --input_dir json --backend_url http://localhost:13120/ --is_json --doctype text
           python dats_importer.py --input_dir images --backend_url http://localhost:13120/ --doctype image
-      - name: Check 5 - pyright runs without errors
-        run: |
-          micromamba env create -f backend/environment.yml --yes
-          micromamba run -n dats pip install -r backend/src/app/preprocessing/ray_model_worker/requirements.txt
-          micromamba run -n dats pip install ray==2.32.0
-          micromamba run -n dats pyright
       - name: Cleanup
         working-directory: docker
         if: always()
@@ -103,6 +105,5 @@ jobs:
           docker rmi $RAY_IMAGE
           fi
           docker compose down -v --remove-orphans
-          micromamba env remove -n dats --yes
           BACKEND_IMAGE=uhhlt/dats_backend:$(grep -oP 'DATS_BACKEND_DOCKER_VERSION=\K.*' .env)
           docker rmi $BACKEND_IMAGE
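
The new type-check and importer steps can also be run outside CI. A minimal local sketch, assuming a checkout of the repository root, an installed uv binary, and the docker compose stack from the earlier steps already serving the backend on port 13120 (the commands mirror the workflow above; downloading and unpacking the password-protected test archive is omitted here):

# Check 4 equivalent: build the backend virtualenv with uv and type-check it
cd backend
uv sync --directory . --no-editable --no-install-project --no-install-workspace
source .venv/bin/activate
pyright

# Check 5 equivalent: reuse the same virtualenv for the end-to-end importer
cd ../tools/importer
uv pip install -r requirements.txt
python dats_importer.py --input_dir json --backend_url http://localhost:13120/ --is_json --doctype text
python dats_importer.py --input_dir images --backend_url http://localhost:13120/ --doctype image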

backend/.dockerignore

Lines changed: 3 additions & 0 deletions
@@ -22,3 +22,6 @@ notebooks
 Dockerfile
 Makefile
 README.md
+
+# ignore venv
+.venv

backend/Dockerfile

Lines changed: 26 additions & 46 deletions
@@ -2,67 +2,47 @@
 # docker push uhhlt/dats_backend:<version>
 
 FROM ubuntu:jammy-20221020 AS ubuntu
+# install uv
+COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
 CMD ["/bin/bash"]
 
 # makes CUDA devices visible to the container by default
 ENV NVIDIA_VISIBLE_DEVICES=all
 ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
 
-# install basic tools to download files/models
-RUN apt-get update -q &&\
-    apt-get install -q -y --no-install-recommends bzip2 curl wget ca-certificates &&\
-    apt-get clean &&\
-    rm -rf /var/lib/apt/lists/* &&\
+# install basic Ubuntu packages including python3.11
+ARG DEBIAN_FRONTEND=noninteractive
+ENV TZ=Europe/Berlin
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+RUN apt-get update && apt-get install -y software-properties-common && add-apt-repository ppa:deadsnakes/ppa
+RUN apt-get update -q && \
+    apt-get install -q -y --no-install-recommends bzip2 curl wget ca-certificates libmagic1 libpq-dev build-essential python3.11 python3-pip ffmpeg && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/* && \
     rm -r /var/cache
 
-# install micromamba for anaconda python package management
-ARG MICROMAMBA_VERSION=1.5.1
-RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/${MICROMAMBA_VERSION} -o mamba.tar.bz2
-RUN tar -xvj -f mamba.tar.bz2 bin/micromamba
-RUN rm mamba.tar.bz2
-
-# create the 'dats' python environment with all dependencies
-ENV MAMBA_ROOT_PREFIX=/opt
-COPY environment.yml .
-COPY requirements.txt /requirements.txt
-RUN micromamba create -f environment.yml -q -y &&\
-    micromamba clean -a -f -q -y &&\
-    find /opt/ -follow -type f -name '*.a' -delete &&\
-    find /opt/ -follow -type f -name '*.js.map' -delete &&\
-    rm -r /root/.cache
-
-
-# These settings & scripts are needed to automatically start any CMD in the container with the python environment
-ENV MAMBA_EXE="/bin/micromamba"
-ENV ENV_NAME=dats
-ARG MAMBA_DOCKERFILE_ACTIVATE=1
-COPY _entrypoint.sh /usr/local/bin/_entrypoint.sh
-COPY _activate_current_env.sh /usr/local/bin/_activate_current_env.sh
-COPY _dockerfile_shell.sh /usr/local/bin/_dockerfile_shell.sh
-ENTRYPOINT ["/usr/local/bin/_entrypoint.sh"]
-SHELL ["/usr/local/bin/_dockerfile_shell.sh"]
-
+# create the python environment
+ENV UV_LINK_MODE=copy
+ENV UV_COMPILE_BYTECODE=1
+ENV UV_LOCKED=1
 
+WORKDIR /dats_code
+RUN --mount=type=cache,target=/root/.cache/uv \
+    --mount=type=bind,source=uv.lock,target=uv.lock \
+    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+    uv sync --directory . --no-dev --no-editable --no-install-project --no-install-workspace --no-managed-python
+ENV PATH="/dats_code/.venv/bin:${PATH}"
 
 # set up python env variables
-ARG DATS_ENV
-ENV DATS_ENV=${DATS_ENV} \
-    PYTHONFAULTHANDLER=1 \
-    PYTHONUNBUFFERED=1 \
-    PYTHONHASHSEED=random \
-    PYTHONDONTWRITEBYTECODE=1 \
-    CUPY_CACHE_IN_MEMORY=1
+ENV PYTHONFAULTHANDLER=1
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONHASHSEED=random
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV CUPY_CACHE_IN_MEMORY=1
 
 # allow running celery workers as superuser
 ENV C_FORCE_ROOT=1
 
-# For development, Jupyter remote kernel
-# Using inside the container:
-# jupyter lab --ip=0.0.0.0 --allow-root --NotebookApp.custom_display_url=http://127.0.0.1:8888
-ARG INSTALL_JUPYTER=false
-RUN bash -c "if [ $INSTALL_JUPYTER == 'true' ] ; then micromamba install jupyterlab -c conda-forge -q -y -n dats && micromamba clean -a -f -q -y ; fi"
-
 # copy the actual dats source code into the image
-WORKDIR /dats_code
 COPY . /dats_code
 WORKDIR /dats_code/src
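
The rebuilt Dockerfile relies on BuildKit cache and bind mounts for the `uv sync` layer and prepends the virtualenv to PATH instead of using an activation entrypoint. A rough local smoke test, assuming BuildKit is available, the build context is backend/ (where uv.lock and pyproject.toml live), and the tag dats_backend:dev is a placeholder not defined by this commit:

# build with BuildKit so the --mount cache/bind flags in the uv sync layer are honored
DOCKER_BUILDKIT=1 docker build -t dats_backend:dev backend/
# the venv is first on PATH, so a bare `python` should resolve to /dats_code/.venv/bin/python
docker run --rm dats_backend:dev python -c "import sys; print(sys.executable)"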

backend/_activate_current_env.sh

Lines changed: 0 additions & 39 deletions
This file was deleted.

backend/_dockerfile_shell.sh

Lines changed: 0 additions & 10 deletions
This file was deleted.

backend/_entrypoint.sh

Lines changed: 0 additions & 24 deletions
This file was deleted.

backend/environment.yml

Lines changed: 0 additions & 55 deletions
This file was deleted.
