Transgene now returns None with an empty VCF and does not fail.
If Transgene returns None, all downstream tools that rely on its return value now return None as well.

Resolves #214
davidstew committed May 20, 2018
1 parent 0631068 commit 17e489b
Showing 3 changed files with 27 additions and 7 deletions.
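
The change follows a guard-and-propagate pattern: each downstream Toil job first checks whether its Transgene-derived input is None, logs the condition, and returns None itself so the rest of the pipeline degrades gracefully instead of raising. Below is a minimal sketch of that pattern, using hypothetical stand-ins (downstream_tool, transgene_result) rather than the pipeline's real Toil jobs:

# Minimal sketch of the guard-and-propagate pattern; downstream_tool and
# transgene_result are hypothetical stand-ins for the pipeline's Toil jobs.
def downstream_tool(transgene_result, patient):
    """Short-circuit when the upstream Transgene step produced no peptides."""
    if transgene_result is None:
        # Mirror the pipeline's behaviour: log and hand None to the next job.
        print('Downstream tool failed to find any peptides for %s.' % patient)
        return None
    # Normal processing would happen here.
    return {'peptides': transgene_result}

if __name__ == '__main__':
    # With an empty VCF, Transgene yields None and every dependent job simply
    # returns None in turn instead of raising an error.
    assert downstream_tool(None, 'test_patient') is None
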
8 changes: 8 additions & 0 deletions src/protect/binding_prediction/common.py
@@ -58,6 +58,10 @@ def spawn_antigen_predictors(job, transgened_files, phlat_files, univ_options, m
            +- 'normal': fsID
     :rtype: tuple(dict, dict)
     """
+    if transgened_files is None:
+        job.fileStore.logToMaster('Spawn antigen predictors failed to find any peptides for %s.'
+                                  % univ_options['patient'])
+        return None
     work_dir = os.getcwd()
     mhci_options, mhcii_options = mhc_options
     pept_files = {
@@ -498,6 +502,10 @@ def merge_mhc_peptide_calls(job, antigen_predictions, transgened_files, univ_opt
     :rtype: dict
     """
     job.fileStore.logToMaster('Merging MHC calls')
+    if antigen_predictions is None and transgened_files is None:
+        job.fileStore.logToMaster('Merge_mhc_peptide_calls failed to find any peptides for %s.'
+                                  % univ_options['patient'])
+        return None
     work_dir = os.getcwd()
     pept_files = {
         '10_mer.faa': transgened_files['transgened_tumor_10_mer_peptides.faa'],
22 changes: 15 additions & 7 deletions src/protect/mutation_translation.py
@@ -90,7 +90,8 @@ def run_transgene(job, snpeffed_file, rna_bam, univ_options, transgene_options,
                   '--pep_lens', '9,10,15',
                   '--cores', str(transgene_options['n']),
                   '--genome', input_files['genome.fa'],
-                  '--annotation', input_files['annotation.gtf']]
+                  '--annotation', input_files['annotation.gtf'],
+                  '--log_file', '/data/transgene.log']

     if snpeffed_file is not None:
         parameters.extend(['--snpeff', input_files['snpeffed_muts.vcf']])
@@ -111,11 +112,15 @@ def run_transgene(job, snpeffed_file, rna_bam, univ_options, transgene_options,
         parameters += ['--transcripts', fusion_files['transcripts.fa'],
                        '--fusions', fusion_files['fusion_calls']]

-    docker_call(tool='transgene',
-                tool_parameters=parameters,
-                work_dir=work_dir,
-                dockerhub=univ_options['dockerhub'],
-                tool_version=transgene_options['version'])
+    try:
+        docker_call(tool='transgene',
+                    tool_parameters=parameters,
+                    work_dir=work_dir,
+                    dockerhub=univ_options['dockerhub'],
+                    tool_version=transgene_options['version'])
+    finally:
+        logfile = os.path.join(os.getcwd(), 'transgene.log')
+        export_results(job, job.fileStore.writeGlobalFile(logfile), logfile, univ_options, subfolder='peptides')

     output_files = defaultdict()
     for peplen in ['9', '10', '15']:
@@ -128,7 +133,10 @@ def run_transgene(job, snpeffed_file, rna_bam, univ_options, transgene_options,
             if tissue_type == 'tumor':
                 os.rename(os.path.join(work_dir, old_pepfile + '.map'),
                           os.path.join(work_dir, pepfile + '.map'))
-
+            if not os.path.exists(pepfile):
+                job.fileStore.logToMaster('Transgene failed to find any peptides for %s.'
+                                          % univ_options['patient'])
+                return None
             output_files[pepfile] = job.fileStore.writeGlobalFile(os.path.join(work_dir, pepfile))
             export_results(job, output_files[pepfile], pepfile, univ_options, subfolder='peptides')
         mapfile = '_'.join(['transgened_tumor', peplen, 'mer_peptides.faa.map'])
4 changes: 4 additions & 0 deletions src/protect/rankboost.py
@@ -36,6 +36,10 @@ def wrap_rankboost(job, rsem_files, merged_mhc_calls, transgene_out, univ_option
                  +- 'mhci_rankboost_detailed_results.txt': fsID
     :rtype: dict
     """
+    if merged_mhc_calls is None:
+        job.fileStore.logToMaster('Wrap_rankboost failed to find any peptides for %s.'
+                                  % univ_options['patient'])
+        return None
     rankboost = job.addChildJobFn(boost_ranks, rsem_files['rsem.isoforms.results'],
                                   merged_mhc_calls, transgene_out, univ_options, rankboost_options)

