From f52a875248d63399318abdfee7c135488b9a189d Mon Sep 17 00:00:00 2001
From: Maroun Touma
Date: Wed, 8 Jan 2025 18:30:20 -0500
Subject: [PATCH 1/2] Added TRANSFORM_NAME to docker build args, allowing
 simple copy/paste of the Dockerfile templates

Signed-off-by: Maroun Touma
---
 .make.defaults                                |  1 +
 transforms/Dockerfile.python.template         | 33 ++++++++++++++++++
 transforms/Dockerfile.ray.template            | 31 +++++++++++++++++
 transforms/Dockerfile.spark.template          | 34 +++++++++++++++++++
 transforms/universal/doc_id/Dockerfile.python | 11 ++++--
 transforms/universal/doc_id/Dockerfile.ray    | 12 +++----
 transforms/universal/doc_id/Dockerfile.spark  |  3 +-
 transforms/universal/filter/Dockerfile.python | 11 ++++--
 transforms/universal/filter/Dockerfile.ray    | 12 +++----
 transforms/universal/filter/Dockerfile.spark  |  3 +-
 transforms/universal/hap/Dockerfile.python    |  3 +-
 transforms/universal/hap/Dockerfile.ray       |  6 ++--
 12 files changed, 135 insertions(+), 25 deletions(-)
 create mode 100644 transforms/Dockerfile.python.template
 create mode 100644 transforms/Dockerfile.ray.template
 create mode 100644 transforms/Dockerfile.spark.template

diff --git a/.make.defaults b/.make.defaults
index 80df91c8e..1f0ffa7cb 100644
--- a/.make.defaults
+++ b/.make.defaults
@@ -226,6 +226,7 @@ __check_defined = \
 	--platform $(DOCKER_PLATFORM) \
 	--build-arg EXTRA_INDEX_URL=$(EXTRA_INDEX_URL) \
 	--build-arg BASE_IMAGE=$(BASE_IMAGE) \
+	--build-arg TRANSFORM_NAME=$(TRANSFORM_NAME) \
 	--build-arg DPK_WHEEL_FILE_NAME=$(DPK_WHEEL_FILE_NAME) \
 	--build-arg BUILD_DATE=$(shell date -u +'%Y-%m-%dT%H:%M:%SZ') \
 	--build-arg GIT_COMMIT=$(shell git log -1 --format=%h) .
diff --git a/transforms/Dockerfile.python.template b/transforms/Dockerfile.python.template
new file mode 100644
index 000000000..1bde08841
--- /dev/null
+++ b/transforms/Dockerfile.python.template
@@ -0,0 +1,33 @@
+FROM docker.io/python:3.10.14-slim-bullseye
+
+RUN pip install --upgrade --no-cache-dir pip
+
+# install pytest
+RUN pip install --no-cache-dir pytest
+
+# Create a user and use it to run the transform
+RUN useradd -ms /bin/bash dpk
+USER dpk
+WORKDIR /home/dpk
+ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
+
+# Copy and install data processing libraries
+# These are expected to be placed in the docker context before this is run (see the make image).
+COPY --chown=dpk:root data-processing-dist data-processing-dist
+RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
+
+# END OF STEPS destined for a data-prep-kit base image
+
+COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:root requirements.txt requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Set environment
+ENV PYTHONPATH /home/dpk
+
+# Put these at the end since they seem to upset the docker cache.
+ARG BUILD_DATE
+ARG GIT_COMMIT
+LABEL build-date=$BUILD_DATE
+LABEL git-commit=$GIT_COMMIT
diff --git a/transforms/Dockerfile.ray.template b/transforms/Dockerfile.ray.template
new file mode 100644
index 000000000..944d04dd8
--- /dev/null
+++ b/transforms/Dockerfile.ray.template
@@ -0,0 +1,31 @@
+ARG BASE_IMAGE=docker.io/rayproject/ray:2.24.0-py310
+FROM ${BASE_IMAGE}
+
+RUN pip install --upgrade --no-cache-dir pip
+
+# install pytest
+RUN pip install --no-cache-dir pytest
+ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
+
+# Copy and install data processing libraries
+# These are expected to be placed in the docker context before this is run (see the make image).
+COPY --chown=ray:users data-processing-dist data-processing-dist
+RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[ray]
+
+
+COPY --chown=ray:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=ray:users requirements.txt requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Grant non-root users the necessary permissions to the ray directory
+RUN chmod 755 /home/ray
+
+# Set environment
+ENV PYTHONPATH /home/ray
+
+# Put these at the end since they seem to upset the docker cache.
+ARG BUILD_DATE
+ARG GIT_COMMIT
+LABEL build-date=$BUILD_DATE
+LABEL git-commit=$GIT_COMMIT
diff --git a/transforms/Dockerfile.spark.template b/transforms/Dockerfile.spark.template
new file mode 100644
index 000000000..0a183d8d9
--- /dev/null
+++ b/transforms/Dockerfile.spark.template
@@ -0,0 +1,34 @@
+FROM quay.io/dataprep1/data-prep-kit/data-prep-kit-spark-3.5.2:latest
+
+USER root
+# install pytest
+RUN pip install --no-cache-dir pytest
+
+WORKDIR ${SPARK_HOME}/work-dir
+ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
+
+# Copy and install data processing libraries
+# These are expected to be placed in the docker context before this is run (see the make image).
+COPY --chown=spark:root data-processing-dist data-processing-dist
+RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
+
+
+# Install project source
+
+## Copy the python version of the tansform
+COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:root requirements.txt requirements.txt
+RUN pip install -r requirements.txt
+
+
+USER spark
+
+# Set environment
+ENV PYTHONPATH=${SPARK_HOME}/work-dir/:${PYTHONPATH}
+
+# Put these at the end since they seem to upset the docker cache.
+ARG BUILD_DATE
+ARG GIT_COMMIT
+LABEL build-date=$BUILD_DATE
+LABEL git-commit=$GIT_COMMIT
diff --git a/transforms/universal/doc_id/Dockerfile.python b/transforms/universal/doc_id/Dockerfile.python
index fc634a043..1bde08841 100644
--- a/transforms/universal/doc_id/Dockerfile.python
+++ b/transforms/universal/doc_id/Dockerfile.python
@@ -2,21 +2,26 @@ FROM docker.io/python:3.10.14-slim-bullseye
 
 RUN pip install --upgrade --no-cache-dir pip
 
+# install pytest
+RUN pip install --no-cache-dir pytest
+
 # Create a user and use it to run the transform
 RUN useradd -ms /bin/bash dpk
 USER dpk
 WORKDIR /home/dpk
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
 COPY --chown=dpk:root data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
-COPY --chown=dpk:root dpk_doc_id/ dpk_doc_id/
-COPY --chown=dpk:root requirements.txt requirements.txt
-RUN pip install --no-cache-dir -r requirements.txt
+# END OF STEPS destined for a data-prep-kit base image
 
+COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:root requirements.txt requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
 ENV PYTHONPATH /home/dpk
diff --git a/transforms/universal/doc_id/Dockerfile.ray b/transforms/universal/doc_id/Dockerfile.ray
index d8eadc23b..944d04dd8 100644
--- a/transforms/universal/doc_id/Dockerfile.ray
+++ b/transforms/universal/doc_id/Dockerfile.ray
@@ -1,5 +1,4 @@
 ARG BASE_IMAGE=docker.io/rayproject/ray:2.24.0-py310
-
 FROM ${BASE_IMAGE}
 
 RUN pip install --upgrade --no-cache-dir pip
@@ -7,16 +6,17 @@ RUN pip install --upgrade --no-cache-dir pip
 # install pytest
 RUN pip install --no-cache-dir pytest
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
 COPY --chown=ray:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[ray]
 
-## Copy the python version of the tansform
-COPY --chown=ray:users dpk_doc_id/ dpk_doc_id/
-COPY --chown=ray:users requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+
+COPY --chown=ray:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=ray:users requirements.txt requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
 
 # Grant non-root users the necessary permissions to the ray directory
 RUN chmod 755 /home/ray
@@ -28,4 +28,4 @@ ENV PYTHONPATH /home/ray
 ARG BUILD_DATE
 ARG GIT_COMMIT
 LABEL build-date=$BUILD_DATE
-LABEL git-commit=$GIT_COMMIT
\ No newline at end of file
+LABEL git-commit=$GIT_COMMIT
diff --git a/transforms/universal/doc_id/Dockerfile.spark b/transforms/universal/doc_id/Dockerfile.spark
index 70c626a87..0a183d8d9 100644
--- a/transforms/universal/doc_id/Dockerfile.spark
+++ b/transforms/universal/doc_id/Dockerfile.spark
@@ -6,6 +6,7 @@ RUN pip install --no-cache-dir pytest
 
 WORKDIR ${SPARK_HOME}/work-dir
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
@@ -16,7 +17,7 @@ RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_doc_id/ dpk_doc_id/
+COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
 COPY --chown=spark:root requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
diff --git a/transforms/universal/filter/Dockerfile.python b/transforms/universal/filter/Dockerfile.python
index 68319778b..1bde08841 100644
--- a/transforms/universal/filter/Dockerfile.python
+++ b/transforms/universal/filter/Dockerfile.python
@@ -2,21 +2,26 @@ FROM docker.io/python:3.10.14-slim-bullseye
 
 RUN pip install --upgrade --no-cache-dir pip
 
+# install pytest
+RUN pip install --no-cache-dir pytest
+
 # Create a user and use it to run the transform
 RUN useradd -ms /bin/bash dpk
 USER dpk
 WORKDIR /home/dpk
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
 COPY --chown=dpk:root data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
-COPY --chown=dpk:root dpk_filter/ dpk_filter/
-COPY --chown=dpk:root requirements.txt requirements.txt
-RUN pip install --no-cache-dir -r requirements.txt
+# END OF STEPS destined for a data-prep-kit base image
 
+COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:root requirements.txt requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
 ENV PYTHONPATH /home/dpk
diff --git a/transforms/universal/filter/Dockerfile.ray b/transforms/universal/filter/Dockerfile.ray
index fb2db3bd2..944d04dd8 100644
--- a/transforms/universal/filter/Dockerfile.ray
+++ b/transforms/universal/filter/Dockerfile.ray
@@ -1,5 +1,4 @@
 ARG BASE_IMAGE=docker.io/rayproject/ray:2.24.0-py310
-
 FROM ${BASE_IMAGE}
 
 RUN pip install --upgrade --no-cache-dir pip
@@ -7,16 +6,17 @@ RUN pip install --upgrade --no-cache-dir pip
 # install pytest
 RUN pip install --no-cache-dir pytest
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
 COPY --chown=ray:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[ray]
 
-## Copy the python version of the tansform
-COPY --chown=ray:users dpk_filter/ dpk_filter/
-COPY --chown=ray:users requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+
+COPY --chown=ray:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=ray:users requirements.txt requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
 
 # Grant non-root users the necessary permissions to the ray directory
 RUN chmod 755 /home/ray
@@ -28,4 +28,4 @@ ENV PYTHONPATH /home/ray
 ARG BUILD_DATE
 ARG GIT_COMMIT
 LABEL build-date=$BUILD_DATE
-LABEL git-commit=$GIT_COMMIT
\ No newline at end of file
+LABEL git-commit=$GIT_COMMIT
diff --git a/transforms/universal/filter/Dockerfile.spark b/transforms/universal/filter/Dockerfile.spark
index aa7a9ab6d..0a183d8d9 100644
--- a/transforms/universal/filter/Dockerfile.spark
+++ b/transforms/universal/filter/Dockerfile.spark
@@ -6,6 +6,7 @@ RUN pip install --no-cache-dir pytest
 
 WORKDIR ${SPARK_HOME}/work-dir
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
@@ -16,7 +17,7 @@ RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_filter/ dpk_filter/
+COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
 COPY --chown=spark:root requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
diff --git a/transforms/universal/hap/Dockerfile.python b/transforms/universal/hap/Dockerfile.python
index e31639398..1bde08841 100644
--- a/transforms/universal/hap/Dockerfile.python
+++ b/transforms/universal/hap/Dockerfile.python
@@ -10,6 +10,7 @@ RUN useradd -ms /bin/bash dpk
 USER dpk
 WORKDIR /home/dpk
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
@@ -18,7 +19,7 @@ RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_hap/ dpk_hap/
+COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
 COPY --chown=dpk:root requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
diff --git a/transforms/universal/hap/Dockerfile.ray b/transforms/universal/hap/Dockerfile.ray
index c11c63c42..944d04dd8 100644
--- a/transforms/universal/hap/Dockerfile.ray
+++ b/transforms/universal/hap/Dockerfile.ray
@@ -6,6 +6,7 @@ RUN pip install --upgrade --no-cache-dir pip
 # install pytest
 RUN pip install --no-cache-dir pytest
 ARG DPK_WHEEL_FILE_NAME
+ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
@@ -13,10 +14,7 @@ COPY --chown=ray:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[ray]
 
 
-#COPY requirements.txt requirements.txt
-#RUN pip install --no-cache-dir -r requirements.txt
-
-COPY --chown=ray:users dpk_hap/ dpk_hap/
+COPY --chown=ray:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
 COPY --chown=ray:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 

From cef1dea5e6e8de3d1bed3a50aef77a0f79981e37 Mon Sep 17 00:00:00 2001
From: Maroun Touma
Date: Thu, 9 Jan 2025 07:27:23 -0500
Subject: [PATCH 2/2] Change *:root to *:users

Signed-off-by: Maroun Touma
---
 transforms/Dockerfile.python.template         | 6 +++---
 transforms/Dockerfile.spark.template          | 6 +++---
 transforms/universal/doc_id/Dockerfile.python | 6 +++---
 transforms/universal/doc_id/Dockerfile.spark  | 6 +++---
 transforms/universal/filter/Dockerfile.python | 6 +++---
 transforms/universal/filter/Dockerfile.spark  | 6 +++---
 transforms/universal/hap/Dockerfile.python    | 6 +++---
 7 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/transforms/Dockerfile.python.template b/transforms/Dockerfile.python.template
index 1bde08841..9f38097b7 100644
--- a/transforms/Dockerfile.python.template
+++ b/transforms/Dockerfile.python.template
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
diff --git a/transforms/Dockerfile.spark.template b/transforms/Dockerfile.spark.template
index 0a183d8d9..1af783438 100644
--- a/transforms/Dockerfile.spark.template
+++ b/transforms/Dockerfile.spark.template
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 
 
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
 
diff --git a/transforms/universal/doc_id/Dockerfile.python b/transforms/universal/doc_id/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/doc_id/Dockerfile.python
+++ b/transforms/universal/doc_id/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
diff --git a/transforms/universal/doc_id/Dockerfile.spark b/transforms/universal/doc_id/Dockerfile.spark
index 0a183d8d9..1af783438 100644
--- a/transforms/universal/doc_id/Dockerfile.spark
+++ b/transforms/universal/doc_id/Dockerfile.spark
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 
 
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
 
diff --git a/transforms/universal/filter/Dockerfile.python b/transforms/universal/filter/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/filter/Dockerfile.python
+++ b/transforms/universal/filter/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
diff --git a/transforms/universal/filter/Dockerfile.spark b/transforms/universal/filter/Dockerfile.spark
index 0a183d8d9..1af783438 100644
--- a/transforms/universal/filter/Dockerfile.spark
+++ b/transforms/universal/filter/Dockerfile.spark
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 
 
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
 
diff --git a/transforms/universal/hap/Dockerfile.python b/transforms/universal/hap/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/hap/Dockerfile.python
+++ b/transforms/universal/hap/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
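
Note: the TRANSFORM_NAME build argument introduced above is normally supplied by the docker
build rule in .make.defaults (invoked via the transform's make image target). A rough manual
equivalent, run from a transform directory such as transforms/universal/doc_id with the
data-processing-dist wheel directory, dpk_doc_id/ sources, and requirements.txt already in the
build context, is sketched below; the image tag and wheel file name are illustrative, not the
values the Makefile computes.

    # Illustrative only: the real build is driven by the Makefile, which computes these values.
    docker build -f ../../Dockerfile.python.template \
        --build-arg TRANSFORM_NAME=doc_id \
        --build-arg DPK_WHEEL_FILE_NAME=data_prep_toolkit-0.0.0-py3-none-any.whl \
        --build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
        --build-arg GIT_COMMIT=$(git log -1 --format=%h) \
        -t doc_id-python:latest .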