Skip to content

Commit 1e98c6d

Browse files
keysmashes authored and lbergelson committed
Update references to old-style arguments (#5063)
* Fixing references to old style arguments that were still in documentation and error messages.
1 parent 6e44c60 commit 1e98c6d

22 files changed

+60
-59
lines changed

src/main/java/org/broadinstitute/hellbender/tools/FixCallSetSampleOrdering.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -175,7 +175,7 @@ private Map<Path, String> loadGvcfToHeaderSampleMap() {
175175

176176
if ( ! gvcfPathsFromSampleNameMap.isEmpty() ) {
177177
throw new SampleNameFixingCannotProceedException("Not all GVCF paths from the --" + GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME +
178-
" were found in the provided --gvcfToHeaderSampleMapFile");
178+
" were found in the provided --gvcf-to-header-sample-map-file");
179179
}
180180

181181
return mapping;
@@ -273,11 +273,11 @@ private void assertThatTheyReallyWantToProceed(){
273273
"You should be certain you want to do this before proceeding.\n" +
274274
"If the following description does not apply to your VCF then the newly generated vcf will be \n\n \t\tHORRIBLY CORRUPTED: by having its sample names shuffled so that the genotypes don't correspond to the correct samples\n\n" +
275275
"1: your vcf was generated using a GenomicsDBImport released before gatk version 4.beta.6\n" +
276-
"2: you set --batchSize != 0 when running GenomicsDBImport\n" +
276+
"2: you set --batch-size != 0 when running GenomicsDBImport\n" +
277277
"3: your callset was imported in multiple batches, i.e. your number of samples > batchSize\n" +
278278
"4: you supplied the exact same sampleNameMap file and batch size you used in the initial GenomicsDBImport\n" +
279279
"or:\n" +
280-
"1. you ran GenomicsDBImport with --readerThreads > 1, and at least one sample name as declared\n" +
280+
"1. you ran GenomicsDBImport with --reader-threads > 1, and at least one sample name as declared\n" +
281281
" in a GVCF header did not match the sample name specified for that file in the sample name map file\n" +
282282
" provided to GenomicsDBImport\n\n" +
283283

src/main/java/org/broadinstitute/hellbender/tools/HaplotypeCallerSpark.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@
6262
*
6363
* <p>This is an implementation of {@link HaplotypeCaller} using spark to distribute the computation.
6464
* It is still in an early stage of development and does not yet support all the options that the non-spark version does.
65-
* Specifically it does not support the --dbsnp, --comp, and --bamOutput options.</p>
65+
* Specifically it does not support the --dbsnp, --comp, and --bam-output options.</p>
6666
*
6767
* <h3>Usage Example</h3>
6868
* <pre>

src/main/java/org/broadinstitute/hellbender/tools/spark/ApplyBQSRSpark.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,8 @@
4242
* gatk ApplyBQSRSpark \
4343
* -I gs://my-gcs-bucket/input.bam \
4444
* -bqsr gs://my-gcs-bucket/recalibration.table \
45-
* -SQQ 10 -SQQ 20 -SQQ 30 -SQQ 40 \
45+
* --static-quantized-quals 10 --static-quantized-quals 20 \
46+
* --static-quantized-quals 30 --static-quantized-quals 40 \
4647
* -O gs://my-gcs-bucket/output.bam \
4748
* -- \
4849
* --sparkRunner GCS \

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PSFilterArgumentCollection.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -291,10 +291,10 @@ public void doReadFilterArgumentWarnings(final GATKReadFilterPluginDescriptor pl
291291
for (final ReadFilter filter : readFilters) {
292292
if (filter.getClass().isAssignableFrom(AmbiguousBaseReadFilter.class)) {
293293
logger.warn("Detected the use of AmbiguousBaseReadFilter, which is applied before the PathSeq " +
294-
"base masking steps. Did you mean to use --maxMaskedBases, which is applied after masking?");
294+
"base masking steps. Did you mean to use --max-masked-bases, which is applied after masking?");
295295
} else if (filter.getClass().isAssignableFrom(ReadLengthReadFilter.class)) {
296296
logger.warn("Detected the use of ReadLengthReadFilter, which is applied before the PathSeq " +
297-
"clipping steps. Did you mean to use --minClippedReadLength, which is applied after clipping?");
297+
"clipping steps. Did you mean to use --min-clipped-read-length, which is applied after clipping?");
298298
}
299299
}
300300
}

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PSScorer.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,7 @@ static JavaRDD<Iterable<GATKRead>> groupReadsIntoPairs(final JavaRDD<GATKRead> p
107107
} else if (unpairedReads != null) {
108108
groupedReads = unpairedReads.map(Collections::singletonList);
109109
} else {
110-
throw new UserException.BadInput("No reads were loaded. Ensure --pairedInput and/or --unpairedInput are set and valid.");
110+
throw new UserException.BadInput("No reads were loaded. Ensure --paired-input and/or --unpaired-input are set and valid.");
111111
}
112112
return groupedReads;
113113
}

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PSUtils.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ public static int pathseqGetRecommendedNumReducers(final String inputPath, final
6767

6868
/**
6969
* Returns a deep copy of the input header with an empty sequence dictionary, and logs warnings if the input may
70-
* be aligned but --isHostAligned was not set to true (or vice versa).
70+
* be aligned but --is-host-aligned was not set to true (or vice versa).
7171
*/
7272
public static SAMFileHeader checkAndClearHeaderSequences(final SAMFileHeader inputHeader, final PSFilterArgumentCollection filterArgs, final Logger logger) {
7373

@@ -79,10 +79,10 @@ public static SAMFileHeader checkAndClearHeaderSequences(final SAMFileHeader inp
7979
final SAMFileHeader header = inputHeader.clone();
8080

8181
if (filterArgs.alignedInput && (header.getSequenceDictionary() == null || header.getSequenceDictionary().isEmpty())) {
82-
logger.warn("--isHostAligned is true but the BAM header contains no sequences");
82+
logger.warn("--is-host-aligned is true but the BAM header contains no sequences");
8383
}
8484
if (!filterArgs.alignedInput && header.getSequenceDictionary() != null && !header.getSequenceDictionary().isEmpty()) {
85-
logger.warn("--isHostAligned is false but there are one or more sequences in the BAM header");
85+
logger.warn("--is-host-aligned is false but there are one or more sequences in the BAM header");
8686
}
8787

8888
//Clear header sequences

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PathSeqBuildReferenceTaxonomy.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ public class PathSeqBuildReferenceTaxonomy extends CommandLineProgram {
122122
public Object doWork() {
123123

124124
if (refseqCatalogPath == null && genbankCatalogPath == null) {
125-
throw new UserException.BadInput("At least one of --refseqCatalogPath or --genbankCatalogPath must be specified");
125+
throw new UserException.BadInput("At least one of --refseq-catalog or --genbank-catalog must be specified");
126126
}
127127

128128
logger.info("Parsing reference and files... (this may take a few minutes)");

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PathSeqBwaSpark.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -212,15 +212,15 @@ private boolean alignBam(final String inputBamPath, final PSBwaAlignerSpark alig
212212
protected void runTool(final JavaSparkContext ctx) {
213213

214214
if (!readArguments.getReadFiles().isEmpty()) {
215-
throw new UserException.BadInput("Please use --pairedInput or --unpairedInput instead of --input");
215+
throw new UserException.BadInput("Please use --paired-input or --unpaired-input instead of --input");
216216
}
217217
final ReadsSparkSource readsSource = new ReadsSparkSource(ctx, readArguments.getReadValidationStringency());
218218

219219
final PSBwaAlignerSpark aligner = new PSBwaAlignerSpark(ctx, bwaArgs);
220220
boolean bPairedSuccess = alignBam(inputPaired, aligner, true, ctx, readsSource);
221221
boolean bUnpairedSuccess = alignBam(inputUnpaired, aligner, false, ctx, readsSource);
222222
if (!bPairedSuccess && !bUnpairedSuccess) {
223-
throw new UserException.BadInput("No reads were loaded. Ensure --pairedInput and/or --unpairedInput are set and valid.");
223+
throw new UserException.BadInput("No reads were loaded. Ensure --paired-input and/or --unpaired-input are set and valid.");
224224
}
225225
aligner.close();
226226
}

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PathSeqPipelineSpark.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@
9393
* <h4>Local mode:</h4>
9494
*
9595
* <pre>
96-
* gatk PathSeqFilterSpark \
96+
* gatk PathSeqPipelineSpark \
9797
* --input input_reads.bam \
9898
* --kmer-file host_kmers.bfi \
9999
* --filter-bwa-image host_reference.img \
@@ -112,7 +112,7 @@
112112
* <h4>Spark cluster on Google Cloud DataProc with 6 16-core / 208GB memory worker nodes:</h4>
113113
*
114114
* <pre>
115-
* gatk PathSeqFilterSpark \
115+
* gatk PathSeqPipelineSpark \
116116
* --input gs://my-gcs-bucket/input_reads.bam \
117117
* --kmer-file hdfs://my-cluster-m:8020//host_kmers.bfi \
118118
* --filter-bwa-image /references/host_reference.img \

src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PathSeqScoreSpark.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ static SAMFileHeader joinBamHeaders(final SAMFileHeader pairedHeader, final SAMF
186186
protected void runTool(final JavaSparkContext ctx) {
187187

188188
if (!readArguments.getReadFiles().isEmpty()) {
189-
throw new UserException.BadInput("Please use --pairedInput or --unpairedInput instead of --input");
189+
throw new UserException.BadInput("Please use --paired-input or --unpaired-input instead of --input");
190190
}
191191

192192
final ReadsSparkSource readsSource = new ReadsSparkSource(ctx, readArguments.getReadValidationStringency());

0 commit comments

Comments (0)