Update source formatting for v2.9.5 release
Sangtae Kim committed Jun 25, 2018
1 parent 9ce4e66 commit 5daae2b
Showing 8 changed files with 27 additions and 27 deletions.
4 changes: 2 additions & 2 deletions docs/userGuide/errorAnalysis.md
@@ -12,15 +12,15 @@ Error Pattern Analyzer User Guide
* [Outputs](#outputs)
* [Counts files](#counts-files)
* [Error model output](#error-model-output)
- * [Error counting workflow configuration and execution](#error-counting-workflow-configuration-and-execution)
+ * [Allele counting workflow configuration and execution](#allele-counting-workflow-configuration-and-execution)
* [Configuration](#configuration)
* [Configuration: Excluding regions](#configuration-excluding-regions)
* [Configuration: Annotating known variants](#configuration-annotating-known-variants)
* [Configuration: Report observed indels](#configuration-report-observed-indels)
* [Execution](#execution)
* [Advanced execution options](#advanced-execution-options)
* [`--quiet`](#--quiet)
- * [Viewing error counting workflow ouput](#viewing-error-counting-workflow-ouput)
+ * [Viewing allele counting workflow ouput](#viewing-allele-counting-workflow-ouput)
* [Summary output](#summary-output)
* [Excluding basecalls/indels](#excluding-basecallsindels)
* [Extended output (for model development)](#extended-output-for-model-development)
@@ -557,7 +557,7 @@ exportModelUsingInputJson(

static const unsigned expectedJsonModelCount(1);
const unsigned jsonModelCount(indelErrorModelsJson.getIndelErrorModels().size());
- if(jsonModelCount != expectedJsonModelCount)
+ if (jsonModelCount != expectedJsonModelCount)
{
std::ostringstream oss;
oss << "Expecting indel error model file to describe exactly " << expectedJsonModelCount
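For readers unfamiliar with the validation pattern in this hunk, the following self-contained sketch (not part of this commit; the type and function names are hypothetical, not Strelka's API) shows the same idea: count the models parsed from a JSON file and throw with a descriptive message when the count is unexpected.

```cpp
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-in for the parsed JSON model collection (illustration only).
struct IndelErrorModelsJson
{
    std::vector<std::string> models;
};

// Check that exactly one indel error model was provided, in the same style as
// the hunk above; names here are illustrative.
void checkModelCount(const IndelErrorModelsJson& json, const std::string& filename)
{
    static const unsigned expectedJsonModelCount(1);
    const unsigned jsonModelCount(static_cast<unsigned>(json.models.size()));
    if (jsonModelCount != expectedJsonModelCount)
    {
        std::ostringstream oss;
        oss << "Expecting indel error model file '" << filename << "' to describe exactly "
            << expectedJsonModelCount << " model(s), but found " << jsonModelCount;
        throw std::runtime_error(oss.str());
    }
}
```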
@@ -163,7 +163,7 @@ resetRegion(
if (_maxNormalSampleDepthForCandidateVariants > 0.)
{
_maxNormalSampleDepthForCandidateVariants = std::min(_maxNormalSampleDepthForCandidateVariants,
- static_cast<double>(_opt.max_candidate_indel_depth));
+ static_cast<double>(_opt.max_candidate_indel_depth));
}
else
{
@@ -311,7 +311,7 @@ mergeIndelObservations(
const IndelCounts::Context& context,
const IndelCounts::SingleSampleCandidateVariantContextObservationPattern& indelObservation,
std::map<IndelCounts::Context,
- IndelCounts::SingleSampleCandidateVariantContextObservationPattern>& mergedIndelObservations)
+ IndelCounts::SingleSampleCandidateVariantContextObservationPattern>& mergedIndelObservations)
{
using namespace IndelCounts;

@@ -673,7 +673,7 @@ process_pos_error_counts(
if (referenceSTRContext.STRRepeatCount == std::min(maxSTRRepeatCount, indelReportInfo.refRepeatCount))
{
context = IndelCounts::Context(
- referenceSTRContext.patternSize, referenceSTRContext.STRRepeatCount);
+ referenceSTRContext.patternSize, referenceSTRContext.STRRepeatCount);
}
}

8 changes: 4 additions & 4 deletions src/c++/lib/blt_util/test/logSumUtilTest.cpp
@@ -149,7 +149,7 @@ BOOST_AUTO_TEST_CASE( benchmarkLogSums )
TimeTracker tt;
tt.resume();
double sum(0);
- for (unsigned i(0); i<repeatCount;++i)
+ for (unsigned i(0); i<repeatCount; ++i)
{
for (double value(startValue); value > minValue; value *= valueFactor)
{
@@ -164,7 +164,7 @@ BOOST_AUTO_TEST_CASE( benchmarkLogSums )
TimeTracker tt;
tt.resume();
double sum(0);
- for (unsigned i(0); i<repeatCount;++i)
+ for (unsigned i(0); i<repeatCount; ++i)
{
for (double value(startValue); value > minValue; value *= valueFactor)
{
@@ -180,7 +180,7 @@ BOOST_AUTO_TEST_CASE( benchmarkLogSums )
TimeTracker tt;
tt.resume();
float sum(0);
- for (unsigned i(0); i<repeatCount;++i)
+ for (unsigned i(0); i<repeatCount; ++i)
{
for (float value(startValue); value > minValue; value *= valueFactor)
{
@@ -195,7 +195,7 @@ BOOST_AUTO_TEST_CASE( benchmarkLogSums )
TimeTracker tt;
tt.resume();
float sum(0);
- for (unsigned i(0); i<repeatCount;++i)
+ for (unsigned i(0); i<repeatCount; ++i)
{
for (float value(startValue); value > minValue; value *= valueFactor)
{
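The loops above benchmark repeated log-space additions. As background (not part of this commit), the sketch below illustrates the standard numerically stable way to compute log(exp(a) + exp(b)); it is an independent example, not the project's log-sum utility.

```cpp
#include <algorithm>
#include <cmath>
#include <cstdio>

// Stable computation of log(exp(a) + exp(b)).
// Factoring out the larger argument avoids overflow/underflow of exp().
double logSum(const double a, const double b)
{
    const double hi(std::max(a, b));
    const double lo(std::min(a, b));
    if (std::isinf(hi) && hi < 0) return hi;  // both terms are zero in linear space
    return hi + std::log1p(std::exp(lo - hi));
}

int main()
{
    // Example: adding two probabilities stored in log space.
    const double logP1(std::log(1e-300));
    const double logP2(std::log(2e-300));
    std::printf("%g\n", logSum(logP1, logP2));  // approximately log(3e-300)
    return 0;
}
```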
24 changes: 12 additions & 12 deletions src/c++/lib/errorAnalysis/BasecallCounts.hh
@@ -56,7 +56,7 @@ struct Context
Archive& ar,
const unsigned /* version */)
{
- ar & repeatCount;
+ ar& repeatCount;
}

unsigned repeatCount = 1;
@@ -146,10 +146,10 @@ struct SingleSampleContextObservationPattern
Archive& ar,
const unsigned /* version */)
{
- ar & strand0.refAlleleCount;
- ar & strand0.altAlleleCount;
- ar & strand1.refAlleleCount;
- ar & strand1.altAlleleCount;
+ ar& strand0.refAlleleCount;
+ ar& strand0.altAlleleCount;
+ ar& strand1.refAlleleCount;
+ ar& strand1.altAlleleCount;
}


@@ -344,12 +344,12 @@ struct ContextData
{
// adding error.data instead of error here to reduce the total
// serialization template depth:
- ar & counts.data;
- ar & counts.refAlleleBasecallErrorPhredProbs;
- ar & excludedRegionSkipped;
- ar & depthSkipped;
- ar & emptySkipped;
- ar & noiseSkipped;
+ ar& counts.data;
+ ar& counts.refAlleleBasecallErrorPhredProbs;
+ ar& excludedRegionSkipped;
+ ar& depthSkipped;
+ ar& emptySkipped;
+ ar& noiseSkipped;
}

SingleSampleContextData counts;
@@ -423,7 +423,7 @@ public:
Archive& ar,
const unsigned /* version */)
{
- ar & _data;
+ ar& _data;
}

private:
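The "ar& member;" lines in this file follow the standard Boost.Serialization member-function idiom; the commit only adjusts whitespace around operator&. Below is a minimal, self-contained sketch of that idiom (not part of this commit; ExampleCounts is a hypothetical type, not one of the real counts structures).

```cpp
#include <fstream>
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>

// Hypothetical example type used only to illustrate the serialize() pattern.
struct ExampleCounts
{
    template <class Archive>
    void serialize(Archive& ar, const unsigned /* version */)
    {
        // operator& writes on save and reads on load, so one member
        // function covers both directions.
        ar& refAlleleCount;
        ar& altAlleleCount;
    }

    unsigned refAlleleCount = 0;
    unsigned altAlleleCount = 0;
};

int main()
{
    ExampleCounts out;
    out.refAlleleCount = 10;
    out.altAlleleCount = 2;

    {
        std::ofstream ofs("counts.txt");
        boost::archive::text_oarchive oa(ofs);
        const ExampleCounts& constOut(out);  // Boost prefers saving const objects
        oa << constOut;                      // writes via ExampleCounts::serialize
    }

    ExampleCounts in;
    {
        std::ifstream ifs("counts.txt");
        boost::archive::text_iarchive ia(ifs);
        ia >> in;                            // reads via the same serialize function
    }
    return 0;
}
```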
4 changes: 2 additions & 2 deletions src/c++/lib/errorAnalysis/IndelCounts.hh
@@ -158,8 +158,8 @@ struct Context
Archive& ar,
const unsigned /* version */)
{
- ar & repeatPatternSize;
- ar & repeatCount;
+ ar& repeatPatternSize;
+ ar& repeatCount;
}

unsigned getRepeatPatternSize() const
4 changes: 2 additions & 2 deletions src/python/scoringModelTraining/germline/bin/evs_learn.py
@@ -112,7 +112,7 @@ def getDataSet(inputs, args) :
print "Reading '%s'" % (inputFile)
df = pandas.read_csv(inputFile, na_values=".")
df.fillna("0", inplace=True)
- # Remove false negatives before any subsampling:
+ # Remove false negatives before any subsampling:
df = df[df["tag"] != "FN"]

if args.sample_input:
@@ -129,7 +129,7 @@ def getDataSet(inputs, args) :
else:
print "TP: %d FP: %d" % (tps.shape[0], fps.shape[0])
if tps.shape[0] < fps.shape[0]:
- rows_selected = random.sample(fps.index, tps.shape[0])
+ rows_selected = random.sample(fps.index, tps.shape[0])
fps = pandas.DataFrame(fps.ix[rows_selected])
elif fps.shape[0] < tps.shape[0]:
rows_selected = random.sample(tps.index, fps.shape[0])
2 changes: 1 addition & 1 deletion src/python/scoringModelTraining/somatic/bin/evs_learn.py
@@ -128,7 +128,7 @@ def getDataSet(inputs, sample_input, balance_per_sample) :
rows_selected = random.sample(fps.index, tps.shape[0])
fps = pandas.DataFrame(fps.ix[rows_selected])
elif fps.shape[0] < tps.shape[0]:
- rows_selected = random.sample(tps.index, fps.shape[0])
+ rows_selected = random.sample(tps.index, fps.shape[0])
tps = pandas.DataFrame(tps.ix[rows_selected])
print "Downsampled to TP: %d FP: %d" % (tps.shape[0], fps.shape[0])
df = pandas.concat([tps, fps])
