Skip to content

Commit

Permalink
more work on upgrading workflows to new system
Browse files Browse the repository at this point in the history
  • Loading branch information
davidlougheed committed Oct 23, 2023
1 parent b7f4504 commit 1d1132f
Show file tree
Hide file tree
Showing 5 changed files with 58 additions and 27 deletions.
45 changes: 39 additions & 6 deletions chord_metadata_service/chord/workflows/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
DRS_URL_INPUT = wm.WorkflowServiceUrlInput(id="drs_url", service_kind="drs")
KATSU_URL_INPUT = wm.WorkflowServiceUrlInput(id="katsu_url", service_kind="metadata")
PROJECT_DATASET_INPUT = wm.WorkflowProjectDatasetInput(id="project_dataset")
ACCESS_TOKEN_INPUT = wm.WorkflowConfigInput(id="access_token", key="access_token")


workflow_set = WorkflowSet()
Expand All @@ -69,7 +70,14 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
description="This ingestion workflow will validate and import a Phenopackets schema-compatible JSON document.",
tags=[DATA_TYPE_PHENOPACKET],
file="phenopackets_json.wdl",
inputs=[PROJECT_DATASET_INPUT, KATSU_URL_INPUT, json_file_input("json_document")],
inputs=[
# injected
ACCESS_TOKEN_INPUT,
KATSU_URL_INPUT,
# user
PROJECT_DATASET_INPUT,
json_file_input("json_document"),
],
))

workflow_set.add_workflow(WORKFLOW_EXPERIMENTS_JSON, wm.WorkflowDefinition(
Expand All @@ -78,7 +86,14 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
description="This ingestion workflow will validate and import a Bento Experiments schema-compatible JSON document.",
tags=[DATA_TYPE_EXPERIMENT],
file="experiments_json.wdl",
inputs=[PROJECT_DATASET_INPUT, KATSU_URL_INPUT, json_file_input("json_document")],
inputs=[
# injected
ACCESS_TOKEN_INPUT,
KATSU_URL_INPUT,
# user
PROJECT_DATASET_INPUT,
json_file_input("json_document"),
],
))

workflow_set.add_workflow(WORKFLOW_READSET, wm.WorkflowDefinition(
Expand All @@ -88,6 +103,9 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
tags=[DATA_TYPE_EXPERIMENT_RESULT, DATA_TYPE_READSET],
file="readset.wdl",
inputs=[
# injected
ACCESS_TOKEN_INPUT,
# user
PROJECT_DATASET_INPUT,
wm.WorkflowFileArrayInput(
id="readset_files",
Expand All @@ -104,7 +122,13 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
"VCF files found in the Dataset.",
tags=[DATA_TYPE_EXPERIMENT_RESULT],
file="maf_derived_from_vcf_json.wdl",
inputs=[PROJECT_DATASET_INPUT, json_file_input("json_document")],
inputs=[
# injected
ACCESS_TOKEN_INPUT,
# user
PROJECT_DATASET_INPUT,
json_file_input("json_document"),
],
))

# Analysis workflows ---------------------------------------------------------------------------------------------------
Expand All @@ -115,10 +139,13 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
description="This analysis workflow will create MAF files from every VCF file found in a dataset.",
file="vcf2maf.wdl",
inputs=[
PROJECT_DATASET_INPUT,
wm.WorkflowStringInput(id="vep_cache_dir"), # TODO: injected, from config
# injected
ACCESS_TOKEN_INPUT,
wm.WorkflowConfigInput(key="vep_cache_dir"),
DRS_URL_INPUT,
KATSU_URL_INPUT,
# user
PROJECT_DATASET_INPUT,
]
))

Expand All @@ -130,7 +157,13 @@ def json_file_output(id_: str, output_name: Optional[str] = None):
description="This workflow creates a bundle for cBioPortal ingestion.",
tags=["cbioportal"],
file="cbioportal_export.wdl",
inputs=[PROJECT_DATASET_INPUT, DRS_URL_INPUT, KATSU_URL_INPUT],
inputs=[
# injected
DRS_URL_INPUT,
KATSU_URL_INPUT,
# user
PROJECT_DATASET_INPUT,
],
))

# ----------------------------------------------------------------------------------------------------------------------
Expand Down
15 changes: 7 additions & 8 deletions chord_metadata_service/chord/workflows/wdls/experiments_json.wdl
Original file line number Diff line number Diff line change
Expand Up @@ -3,19 +3,17 @@ version 1.0
workflow experiments_json {
input {
File json_document
String run_dir
String project_id
String dataset_id
String project_dataset
String katsu_url
String secret__access_token
String access_token
}

call ingest_task {
input:
json_document = json_document,
katsu_url = katsu_url,
dataset_id = dataset_id,
token = secret__access_token
project_dataset = project_dataset,
token = access_token
}

output {
Expand All @@ -28,15 +26,16 @@ task ingest_task {
input {
File json_document
String katsu_url
String dataset_id
String project_dataset
String token
}
command <<<
        dataset_id=$(python3 -c 'print("~{project_dataset}".split(":")[1])')
RESPONSE=$(curl -X POST -k -s -w "%{http_code}" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer ~{token}" \
--data "@~{json_document}" \
"~{katsu_url}/ingest/~{dataset_id}/experiments_json")
"~{katsu_url}/ingest/${dataset_id}/experiments_json")
if [[ "${RESPONSE}" != "204" ]]
then
echo "Error: Metadata service replied with ${RESPONSE}" 1>&2 # to stderr
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,19 +3,17 @@ version 1.0
workflow phenopackets_json {
input {
File json_document
String secret__access_token
String run_dir
String project_id
String dataset_id
String access_token
String project_dataset
String katsu_url
}

call ingest_task {
input:
json_document = json_document,
katsu_url = katsu_url,
dataset_id = dataset_id,
token = secret__access_token
project_dataset = project_dataset,
token = access_token
}

output {
Expand All @@ -28,15 +26,16 @@ task ingest_task {
input {
File json_document
String katsu_url
String dataset_id
String project_dataset
String token
}
command <<<
        dataset_id=$(python3 -c 'print("~{project_dataset}".split(":")[1])')
RESPONSE=$(curl -X POST -k -s -w "%{http_code}" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer ~{token}" \
--data "@~{json_document}" \
"~{katsu_url}/ingest/~{dataset_id}/phenopackets_json")
"~{katsu_url}/ingest/${dataset_id}/phenopackets_json")
if [[ "${RESPONSE}" != "204" ]]
then
echo "Error: Metadata service replied with ${RESPONSE}" 1>&2 # to stderr
Expand Down
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ classifiers = [

[tool.poetry.dependencies]
python = "^3.10.0"
bento-lib = {version = "9.0.0a2", extras = ["django"]}
bento-lib = {version = "9.0.0a4", extras = ["django"]}
Django = "^4.2.1"
django-autocomplete-light = "^3.9.4"
django-cors-headers = "^3.13.0"
Expand Down

0 comments on commit 1d1132f

Please sign in to comment.