Skip to content

Commit

Permalink
Handle venv for pipeline generation.
Browse files Browse the repository at this point in the history
Signed-off-by: Revital Sur <[email protected]>
  • Loading branch information
revit13 committed Sep 22, 2024
1 parent 558117d commit abf814b
Show file tree
Hide file tree
Showing 8 changed files with 13 additions and 11 deletions.
2 changes: 2 additions & 0 deletions kfp/pipeline_generator/single-pipeline/README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
## Steps to generate a new pipeline
- create a `pipeline_definitions.yaml` file for the required task (similar to the example [pipeline_definitions.yaml for the noop task](../../../transforms/universal/noop/kfp_ray/pipeline_definitions.yaml)).
- execute `make -C ../../../transforms workflow-venv` from this directory
- execute `source ../../../transforms/venv/bin/activate`
- execute `./run.sh --config_file <pipeline_definitions_file_path> --output_dir_file <destination directory>`, where `pipeline_definitions_file_path` is the path of the `pipeline_definitions.yaml` file that defines the pipeline and `destination directory` is the directory where the new pipeline file
will be generated.
4 changes: 1 addition & 3 deletions kfp/pipeline_generator/single-pipeline/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,7 @@ fi
ROOT_DIR=${PWD}

mkdir -p ${ROOT_DIR}/${DIST_DIR}/
python3 -m venv venv
source venv/bin/activate
pip install jinja2

script_dir="$(dirname "$(readlink -f "$0")")"
echo $PYTHONPATH
python3 ${script_dir}/pipeline_generator.py -c ${DEF_FILE} -od ${ROOT_DIR}/${DIST_DIR}/
2 changes: 2 additions & 0 deletions kfp/pipeline_generator/superpipeline/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
## Steps to generate a new super pipeline in KFP v1.
- The super pipeline allows you to execute several transforms within a single pipeline. For more details, refer [multi_transform_pipeline.md](../../doc/multi_transform_pipeline.md).
- Create a `super_pipeline_definitions.yaml` file for the required task. You can refer to the example [super_pipeline_definitions.yaml](./super_pipeline_definitions.yaml).
- execute `make -C ../../../transforms workflow-venv` from this directory
- execute `source ../../../transforms/venv/bin/activate`
- Execute `./run.sh --config_file <super_pipeline_definitions.yaml> --output_dir_file <destination_directory>`. Here, `super_pipeline_definitions.yaml` is the super pipeline definition file that you created above, and `destination_directory` is the directory where the new super pipeline file will be generated.


Expand Down
5 changes: 1 addition & 4 deletions kfp/pipeline_generator/superpipeline/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,5 @@ fi
ROOT_DIR=${PWD}

mkdir -p ${ROOT_DIR}/${DIST_DIR}/
python3 -m venv venv
source venv/bin/activate
pip install pre-commit
pip install jinja2

python3 super_pipeline_generator.py -c ${DEF_FILE} -od ${ROOT_DIR}/${DIST_DIR}/
3 changes: 3 additions & 0 deletions transforms/.make.workflows
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,9 @@ ${WORKFLOW_VENV_ACTIVATE}: ${REPOROOT}/.make.versions ${REPOROOT}/kfp/kfp_ray_co
pip install -e $(REPOROOT)/kfp/kfp_support_lib/shared_workflow_support; \
pip install -e $(REPOROOT)/kfp/kfp_support_lib/$(WORKFLOW_SUPPORT_LIB); \
$(MAKE) -C ${REPOROOT}/kfp/kfp_ray_components set-versions
pip install jinja2
pip install pyyaml
pip install pre-commit
@# Help: Create the virtual environment common to all workflows

.PHONY: .workflows.upload-pipeline
Expand Down
4 changes: 2 additions & 2 deletions transforms/universal/noop/kfp_ray/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -51,5 +51,5 @@ workflow-upload: workflow-build
done

.PHONY: workflow-generate
workflow-generate:
../../../../kfp/pipeline_generator/single-pipeline/run.sh -c `pwd`/pipeline_definitions.yaml -od .
workflow-generate: workflow-venv
. ${WORKFLOW_VENV_ACTIVATE} && ../../../../kfp/pipeline_generator/single-pipeline/run.sh -c `pwd`/pipeline_definitions.yaml -od .
2 changes: 1 addition & 1 deletion transforms/universal/noop/kfp_ray/noop_wf.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def noop(
server_url: str = "http://kuberay-apiserver-service.kuberay.svc.cluster.local:8888",
# data access
data_s3_config: str = "{'input_folder': 'test/noop/input/', 'output_folder': 'test/noop/output/'}",
data_s3_access_secret: str = "s3-minio",
data_s3_access_secret: str = "s3-secret",
data_max_files: int = -1,
data_num_samples: int = -1,
data_checkpointing: bool = False,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ pipeline_parameters:
pipeline_common_input_parameters_values:
kfp_base_image: "quay.io/dataprep1/data-prep-kit/kfp-data-processing:latest"
transform_image: "quay.io/dataprep1/data-prep-kit/noop-ray:latest"
s3_access_secret: "s3-minio"
s3_access_secret: "s3-secret"
image_pull_secret: ""
input_folder: "test/noop/input/"
output_folder: "test/noop/output/"
Expand Down

0 comments on commit abf814b

Please sign in to comment.