[med-svn] [Git][med-team/picard-tools][master] 3 commits: new upstream release 2.22.8
Olivier Sallou
gitlab at salsa.debian.org
Tue Jun 2 06:37:53 BST 2020
Olivier Sallou pushed to branch master at Debian Med / picard-tools
Commits:
b73a81a3 by Olivier Sallou at 2020-06-02T05:34:59+00:00
nw upstream release 2.22.8
- - - - -
e23d33e0 by Olivier Sallou at 2020-06-02T05:35:36+00:00
add TODO
- - - - -
44ca3a86 by Olivier Sallou at 2020-06-02T05:37:42+00:00
merge
- - - - -
10 changed files:
- debian/changelog
- debian/control
- debian/patches/10-build.xml.patch
- debian/patches/20-shadowjar.patch
- debian/patches/30-tests-fix-dataprovider.patch
- debian/patches/40-tests-fix-testng.patch
- debian/patches/50-tests-exclude-Barclay.patch
- debian/patches/fix_unreadabletests.patch
- debian/patches/remove_google_nio.patch
- debian/patches/series
Changes:
=====================================
debian/changelog
=====================================
@@ -1,5 +1,8 @@
picard-tools (2.22.8+dfsg-1) UNRELEASED; urgency=medium
+ * New upstream release
+ [TODO]
+ fails to compile, need new htsjdk version
* New upstream release
* Source only upload
Closes: #961865
=====================================
debian/control
=====================================
@@ -13,6 +13,8 @@ Build-Depends: default-jdk (>= 2:1.9~),
gradle-debian-helper,
maven-repo-helper,
libguava-java (>= 15.0),
+ libcommons-collections4-java,
+ libcommons-io-java,
libcommons-lang3-java,
libcommons-math3-java,
libbarclay-java,
=====================================
debian/patches/10-build.xml.patch
=====================================
@@ -11,64 +11,40 @@ Last-Updated: 2016-07-07
- id 'jacoco'
id 'application'
- id 'com.palantir.git-version' version '0.5.1'
-- id 'com.github.johnrengelman.shadow' version '1.2.3'
+- id 'com.github.johnrengelman.shadow' version '5.1.0'
- id "com.github.kt3k.coveralls" version '2.6.3'
-- id 'org.ajoberstar.grgit' version '1.4.2'
-- id 'org.ajoberstar.github-pages' version '1.4.2'
+- id "org.ajoberstar.grgit" version "4.0.0-rc.1"
+- id "org.ajoberstar.git-publish" version "2.1.1"
}
mainClassName = "picard.cmdline.PicardCommandLine"
-@@ -31,6 +25,7 @@
+@@ -63,7 +57,6 @@
}
}
-+/*
- jacocoTestReport {
- dependsOn test
- group = "Reporting"
-@@ -46,21 +41,22 @@
- jacoco {
- toolVersion = "0.7.5.201505241946"
- }
-+*/
-
- final requiredJavaVersion = "8"
- final buildPrerequisitesMessage = "See https://github.com/broadinstitute/picard/blob/master/README.md#building-picard for information on how to build picard"
- // Ensure that we have the right JDK version, a clone of the git repository
- def ensureBuildPrerequisites(requiredJavaVersion, buildPrerequisitesMessage) {
- // Make sure we can get a ToolProvider class loader. If not we may have just a JRE, or a JDK from the future.
-- if (ToolProvider.getSystemToolClassLoader() == null) {
-- throw new GradleException(
-- "The ClassLoader obtained from the Java ToolProvider is null. "
-- + "A Java $requiredJavaVersion JDK must be installed. $buildPrerequisitesMessage")
-- }
-- if (!file(".git").isDirectory()) {
-- throw new GradleException("The Picard Github repository must be cloned using \"git clone\" to run the build. "
-- + "$buildPrerequisitesMessage")
-- }
-+ //if (ToolProvider.getSystemToolClassLoader() == null) {
-+ // throw new GradleException(
-+ // "The ClassLoader obtained from the Java ToolProvider is null. "
-+ // + "A Java $requiredJavaVersion JDK must be installed. $buildPrerequisitesMessage")
-+ //}
-+ //if (!file(".git").isDirectory()) {
-+ // throw new GradleException("The Picard Github repository must be cloned using \"git clone\" to run the build. "
-+ // + "$buildPrerequisitesMessage")
-+ //}
- }
- ensureBuildPrerequisites(requiredJavaVersion, buildPrerequisitesMessage)
+-ensureBuildPrerequisites(buildPrerequisitesMessage)
-@@ -73,7 +69,7 @@
+ final htsjdkVersion = System.getProperty('htsjdk.version', '2.21.3')
+ final googleNio = 'com.google.cloud:google-cloud-nio:0.107.0-alpha:shaded'
+@@ -71,7 +64,7 @@
// Get the jdk files we need to run javaDoc. We need to use these during compile, testCompile,
// test execution, and gatkDoc generation, but we don't want them as part of the runtime
// classpath and we don't want to redistribute them in the uber jar.
--final javadocJDKFiles = files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs())
-+//final javadocJDKFiles = files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs())
+-final javadocJDKFiles = ToolProvider.getSystemToolClassLoader() == null ? files([]) : files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs())
++//final javadocJDKFiles = ToolProvider.getSystemToolClassLoader() == null ? files([]) : files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs())
configurations {
cloudConfiguration {
-@@ -95,8 +91,8 @@
- compileOnly googleNio
+@@ -87,6 +80,7 @@
+ exclude module: 'htsjdk'
+ }
+ compile 'com.google.guava:guava:15.0'
++ compile 'commons-io:commons-io:debian'
+ compile 'org.apache.commons:commons-math3:3.5'
+ compile 'org.apache.commons:commons-collections4:4.3'
+ compile 'commons-lang:commons-lang:2.6'
+@@ -97,8 +91,8 @@
+ }
// javadoc utilities; compile/test only to prevent redistribution of sdk jars
- compileOnly(javadocJDKFiles)
@@ -76,9 +52,9 @@ Last-Updated: 2016-07-07
+ //compileOnly(javadocJDKFiles)
+ //testCompile(javadocJDKFiles)
- testCompile 'org.testng:testng:6.9.10'
+ testCompile 'org.testng:testng:6.14.3'
testCompile 'org.apache.commons:commons-lang3:3.6'
-@@ -112,9 +108,18 @@
+@@ -114,9 +108,18 @@
sourceCompatibility = 1.8
targetCompatibility = 1.8
@@ -99,7 +75,7 @@ Last-Updated: 2016-07-07
logger.info("build for version:" + version)
group = 'com.github.broadinstitute'
-@@ -166,6 +171,7 @@
+@@ -168,6 +171,7 @@
tasks.withType(Javadoc) {
// do this for all javadoc tasks, including gatkDoc
options.addStringOption('Xdoclint:none')
@@ -107,17 +83,16 @@ Last-Updated: 2016-07-07
}
javadoc {
-@@ -192,7 +198,8 @@
+@@ -194,7 +198,7 @@
// The gatkDoc process instantiates any documented feature classes, so to run it we need the entire
// runtime classpath, as well as jdk javadoc files such as tools.jar, where com.sun.javadoc lives.
- classpath = sourceSets.main.runtimeClasspath + javadocJDKFiles
-+ //classpath = sourceSets.main.runtimeClasspath + javadocJDKFiles
+ classpath = sourceSets.main.runtimeClasspath
options.docletpath = classpath.asType(List)
options.doclet = "picard.util.help.PicardHelpDoclet"
-@@ -279,6 +286,8 @@
+@@ -281,6 +285,8 @@
}
}
@@ -126,20 +101,57 @@ Last-Updated: 2016-07-07
// set heap size for the test JVM(s)
minHeapSize = "1G"
maxHeapSize = "2G"
-@@ -415,6 +424,7 @@
- into "$htmlDir/picarddoc"
+@@ -319,30 +325,18 @@
+ }
}
-+/*
- task updateGhPages(dependsOn: ['copyJavadoc', 'copyPicardDoc']){
- outputs.dir htmlDir
+-jacocoTestReport {
+- dependsOn legacyTest
+- group = "Reporting"
+- description = "Generate Jacoco coverage reports after running tests."
+- getAdditionalSourceDirs().from(sourceSets.main.allJava.srcDirs)
+-
+- reports {
+- xml.enabled = true // coveralls plugin depends on xml format report
+- html.enabled = true
+- }
+-}
+-
+ wrapper {
+ gradleVersion = '5.6'
+ }
+
+ task javadocJar(type: Jar) {
+- archiveClassifier.set('javadoc')
++ //archiveClassifier.set('javadoc')
+ from 'build/docs/javadoc'
+ }
+
+ task sourcesJar(type: Jar) {
+ from sourceSets.main.allSource
+- archiveClassifier.set('sources')
++ //archiveClassifier.set('sources')
}
-@@ -430,3 +440,4 @@
- into 'newdocs'
- }
+
+ /**
+@@ -426,20 +420,3 @@
+ from 'build/docs/picarddoc'
+ into "$htmlDir/picarddoc"
}
-+*/
---- /dev/null
-+++ b/gradle.properties
-@@ -0,0 +1 @@
-+rootName=picard
+-
+-task updateGhPages(dependsOn: ['copyJavadoc', 'copyPicardDoc']){
+- outputs.dir htmlDir
+-}
+-
+-updateGhPages.finalizedBy gitPublishPush
+-
+-gitPublish {
+- repoUri = 'git at github.com:broadinstitute/picard.git'
+- branch = 'gh-pages'
+- preserve { include '**/*' }
+- contents {
+- from('build/docs/html') {
+- into 'newdocs'
+- }
+- }
+-}
=====================================
debian/patches/20-shadowjar.patch
=====================================
@@ -2,7 +2,7 @@ Description: do not use shadowjar
Author: Sascha Steinbiss <satta at debian.org>
--- a/build.gradle
+++ b/build.gradle
-@@ -126,7 +126,7 @@
+@@ -125,7 +125,7 @@
defaultTasks 'all'
@@ -11,7 +11,7 @@ Author: Sascha Steinbiss <satta at debian.org>
// Source file names for the picard command line properties file. We select and include only one of
// these two files in each jar, renamed to "picardCmdLine.properties", depending on which parser we
-@@ -213,6 +213,7 @@
+@@ -211,6 +211,7 @@
options.addStringOption("verbose")
}
@@ -19,7 +19,7 @@ Author: Sascha Steinbiss <satta at debian.org>
task currentJar(type: Copy){
from shadowJar
into new File(buildDir, "libs")
-@@ -230,8 +231,7 @@
+@@ -228,8 +229,7 @@
}
}
}
=====================================
debian/patches/30-tests-fix-dataprovider.patch
=====================================
@@ -2,47 +2,64 @@ Description: fix data provider requirements
Author: Sascha Steinbiss <satta at debian.org>
--- a/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
+++ b/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
-@@ -21,22 +21,22 @@ public class CollectHsMetricsTest extend
+@@ -31,33 +31,33 @@
@DataProvider(name = "collectHsMetricsDataProvider")
public Object[][] targetedIntervalDataProvider() {
- final String referenceFile = TEST_DIR + "/chrM.fasta";
- final String intervals = TEST_DIR + "/chrM.interval_list";
+- final String halfIntervals = TEST_DIR + "/chrM_100bp.interval_list";
- final String twoSmallIntervals = TEST_DIR + "/two-small.interval_list";
+ final String referenceFile = TEST_DIR.getAbsolutePath() + "/chrM.fasta";
+ final String intervals = TEST_DIR.getAbsolutePath() + "/chrM.interval_list";
++ final String halfIntervals = TEST_DIR.getAbsolutePath() + "/chrM_100bp.interval_list";
+ final String twoSmallIntervals = TEST_DIR.getAbsolutePath() + "/two-small.interval_list";
return new Object[][] {
// two reads, each has 100 bases. bases in one read are medium quality (20), in the other read poor quality (10).
// test that we exclude half of the bases
-- {TEST_DIR + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.50, 0.0, 1, 1000},
-+ {TEST_DIR.getAbsolutePath() + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.50, 0.0, 1, 1000},
+- {TEST_DIR + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.0, 0.50, 0.0, 1, 0, 200, 1000},
++ {TEST_DIR.getAbsolutePath() + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.0, 0.50, 0.0, 1, 0, 200, 1000},
// test that read 2 (with mapping quality 1) is filtered out with minimum mapping quality 2
-- {TEST_DIR + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.505, 0.0, 1, 1000},
-+ {TEST_DIR.getAbsolutePath() + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.505, 0.0, 1, 1000},
+- {TEST_DIR + "/lowbaseq.sam", halfIntervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.0, 1.0, 0.0, 1, 1, 200, 1000},
++ {TEST_DIR.getAbsolutePath() + "/lowbaseq.sam", halfIntervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.0, 1.0, 0.0, 1, 1, 200, 1000},
+ // test that read 2 (with mapping quality 1) is filtered out with minimum mapping quality 2 with an interval that is completely covered
+- {TEST_DIR + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.0, 0.505, 0.0, 1, 0, 202, 1000},
++ {TEST_DIR.getAbsolutePath() + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.0, 0.505, 0.0, 1, 0, 202, 1000},
// test that we clip overlapping bases
-- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.505, 0, 1, 1000},
-+ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.505, 0, 1, 1000},
+- {TEST_DIR + "/lowmapq.sam", halfIntervals, 2, 0, true, 2, 202, 0, 0.0, 0.00495, 1.0, 0.0, 1, 1, 202, 1000},
++ {TEST_DIR.getAbsolutePath() + "/lowmapq.sam", halfIntervals, 2, 0, true, 2, 202, 0, 0.0, 0.00495, 1.0, 0.0, 1, 1, 202, 1000},
+ // test that we clip overlapping bases with an interval that is completely covered
+- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.0, 0.505, 0, 1, 0, 202, 1000},
++ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.0, 0.505, 0, 1, 0, 202, 1000},
// test that we do not clip overlapping bases
-- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.505, 0.505, 2, 1000},
-+ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.505, 0.505, 2, 1000},
+- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.0, 0.505, 0.505, 2, 0, 202, 1000},
++ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.0, 0.505, 0.505, 2, 0, 202, 1000},
+ // test that we exclude half of the bases (due to poor quality) with an interval that is completely covered
+- {TEST_DIR + "/overlapping.sam", halfIntervals, 0, 0, true, 2, 202, 0, 0.5, 0.00495, 1.0, 0, 1, 1, 202, 1000},
++ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", halfIntervals, 0, 0, true, 2, 202, 0, 0.5, 0.00495, 1.0, 0, 1, 1, 202, 1000},
+ // test that we do not clip overlapping bases with an interval that is completely covered
+- {TEST_DIR + "/overlapping.sam", halfIntervals, 0, 0, false, 2, 202, 0, 0.0, 0.009901, 1.0, 1.0, 2, 2, 202, 1000},
++ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", halfIntervals, 0, 0, false, 2, 202, 0, 0.0, 0.009901, 1.0, 1.0, 2, 2, 202, 1000},
// A read 10 base pairs long. two intervals: one maps identically to the read, other does not overlap at all
-- {TEST_DIR + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.5, 0.0, 1, 1000 }
-+ {TEST_DIR.getAbsolutePath() + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.5, 0.0, 1, 1000 }
-
+- {TEST_DIR + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.0, 0.5, 0.0, 1, 0, 10, 1000},
++ {TEST_DIR.getAbsolutePath() + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.0, 0.5, 0.0, 1, 0, 10, 1000},
+ // test that we can figure out low quality and off target in the same bam (low quality is identified first)
+- {TEST_DIR + "/someLowbaseq.sam", twoSmallIntervals, 0, 21, true, 2, 200, 150D / 200, 0D, 40D / 200, 1 / 2D, 0D, 1, 0, 200, 1000},
++ {TEST_DIR.getAbsolutePath() + "/someLowbaseq.sam", twoSmallIntervals, 0, 21, true, 2, 200, 150D / 200, 0D, 40D / 200, 1 / 2D, 0D, 1, 0, 200, 1000},
};
}
-@@ -56,7 +56,7 @@ public class CollectHsMetricsTest extend
- final long maxTargetCoverage,
- final int sampleSize) throws IOException {
+
+@@ -99,7 +99,7 @@
+ final long pfBases,
+ final int sampleSize) throws IOException {
- final File outfile = File.createTempFile("CollectHsMetrics", ".hs_metrics", TEST_DIR);
+ final File outfile = File.createTempFile("CollectHsMetrics", ".hs_metrics", TEST_DIR.getAbsolutePath());
outfile.deleteOnExit();
final String[] args = new String[] {
-@@ -99,14 +99,14 @@ public class CollectHsMetricsTest extend
+@@ -141,14 +141,14 @@
* Test that the depth histogram is [10,10,0,...,0]
*/
=====================================
debian/patches/40-tests-fix-testng.patch
=====================================
@@ -7,7 +7,7 @@ Description: Fix testng version and disable some tests.
This patch disable some tests around this
--- a/build.gradle
+++ b/build.gradle
-@@ -74,9 +74,6 @@ final googleNio = 'org.broadinstitute:go
+@@ -69,9 +69,6 @@
configurations {
cloudConfiguration {
extendsFrom runtime
@@ -17,24 +17,26 @@ Description: Fix testng version and disable some tests.
}
}
-@@ -88,14 +85,13 @@ dependencies {
- compile 'org.apache.commons:commons-math3:3.5'
+@@ -85,16 +82,13 @@
+ compile 'commons-lang:commons-lang:2.6'
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
compile 'org.broadinstitute:barclay:2.0.0'
-- compileOnly googleNio
+- compileOnly(googleNio) {
+- transitive = false
+- }
// javadoc utilities; compile/test only to prevent redistribution of sdk jars
//compileOnly(javadocJDKFiles)
//testCompile(javadocJDKFiles)
-- testCompile 'org.testng:testng:6.9.10'
+- testCompile 'org.testng:testng:6.14.3'
- testCompile 'org.apache.commons:commons-lang3:3.6'
+ testCompile 'org.testng:testng:debian'
+ compile 'org.apache.commons:commons-lang3:debian'
}
configurations.all {
-@@ -233,6 +229,7 @@ shadowJar {
+@@ -231,6 +225,7 @@
}
*/
@@ -42,7 +44,7 @@ Description: Fix testng version and disable some tests.
task cloudJar(type: com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar) {
configurations = [project.configurations.cloudConfiguration]
from project.sourceSets.main.output
-@@ -257,6 +254,7 @@ task barclayShadowJar(type: com.github.j
+@@ -255,6 +250,7 @@
}
archiveName 'picardBarclay.jar'
}
@@ -50,7 +52,7 @@ Description: Fix testng version and disable some tests.
// Run the tests using the legacy parser only. Assumes that test code is written using
// legacy command line parser syntax.
-@@ -267,6 +265,8 @@ task legacyTest(type: Test)
+@@ -265,6 +261,8 @@
task barclayTest(type: Test) {
systemProperty 'picard.convertCommandLine', 'true'
systemProperty 'picard.useLegacyParser', 'false'
@@ -59,7 +61,7 @@ Description: Fix testng version and disable some tests.
}
// Run tests using both the legacy and barclay command line parsers.
-@@ -278,6 +278,9 @@ tasks.withType(Test) {
+@@ -276,6 +274,9 @@
outputs.upToDateWhen { false } // tests will always rerun
description = "Runs the unit tests"
@@ -71,24 +73,27 @@ Description: Fix testng version and disable some tests.
excludeGroups "slow", "broken"
--- a/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
+++ b/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
-@@ -41,51 +41,6 @@ public class CollectHsMetricsTest extend
- };
+@@ -81,54 +81,6 @@
+ return f;
}
- @Test(dataProvider = "collectHsMetricsDataProvider")
- public void runCollectHsMetricsTest(final String input,
-- final String targetIntervals,
-- final int minimumMappingQuality,
-- final int minimumBaseQuality,
-- final boolean clipOverlappingReads,
-- final int totalReads,
-- final int pfUqBasesAligned,
-- final double pctExcBaseq,
-- final double pctExcOverlap,
-- final double pctTargetBases1x,
-- final double pctTargetBases2x,
-- final long maxTargetCoverage,
-- final int sampleSize) throws IOException {
+- final String targetIntervals,
+- final int minimumMappingQuality,
+- final int minimumBaseQuality,
+- final boolean clipOverlappingReads,
+- final int totalReads,
+- final int pfUqBasesAligned,
+- final double pctExcBaseq,
+- final double pctExcOverlap,
+- final double pctExcOffTarget,
+- final double pctTargetBases1x,
+- final double pctTargetBases2x,
+- final long maxTargetCoverage,
+- final long minTargetCoverage,
+- final long pfBases,
+- final int sampleSize) throws IOException {
-
- final File outfile = File.createTempFile("CollectHsMetrics", ".hs_metrics", TEST_DIR.getAbsolutePath());
- outfile.deleteOnExit();
@@ -106,24 +111,24 @@ Description: Fix testng version and disable some tests.
-
- Assert.assertEquals(runPicardCommandLine(args), 0);
-
-- final MetricsFile<HsMetrics, Comparable<?>> output = new MetricsFile<HsMetrics, Comparable<?>>();
-- output.read(new FileReader(outfile));
+- final HsMetrics metrics = readMetrics(outfile);
+- Assert.assertEquals(metrics.TOTAL_READS, totalReads, "TOTAL_READS");
+- Assert.assertEquals(metrics.PF_UQ_BASES_ALIGNED, pfUqBasesAligned, "PF_UQ_BASES_ALIGNED");
+- Assert.assertEquals(metrics.PCT_EXC_BASEQ, pctExcBaseq, "PCT_EXC_BASEQ");
+- Assert.assertEquals(metrics.PCT_EXC_OVERLAP, pctExcOverlap, "PCT_EXC_OVERLAP");
+- Assert.assertEquals(metrics.PCT_EXC_OFF_TARGET, pctExcOffTarget, "PCT_EXC_OFF_TARGET");
+- Assert.assertEquals(metrics.PCT_TARGET_BASES_1X, pctTargetBases1x, "PCT_TARGET_BASES_1X");
+- Assert.assertEquals(metrics.PCT_TARGET_BASES_2X, pctTargetBases2x, "PCT_TARGET_BASES_2X");
+- Assert.assertEquals(metrics.MAX_TARGET_COVERAGE, maxTargetCoverage, "MAX_TARGET_COVERAGE");
+- Assert.assertEquals(metrics.MIN_TARGET_COVERAGE, minTargetCoverage, "MIN_TARGET_COVERAGE");
+- Assert.assertEquals(metrics.PF_BASES, pfBases, "PF_BASES");
-
-- for (final HsMetrics metrics : output.getMetrics()) {
-- // overlap
-- Assert.assertEquals(metrics.TOTAL_READS, totalReads);
-- Assert.assertEquals(metrics.PF_UQ_BASES_ALIGNED, pfUqBasesAligned);
-- Assert.assertEquals(metrics.PCT_EXC_BASEQ, pctExcBaseq);
-- Assert.assertEquals(metrics.PCT_EXC_OVERLAP, pctExcOverlap);
-- Assert.assertEquals(metrics.PCT_TARGET_BASES_1X, pctTargetBases1x);
-- Assert.assertEquals(metrics.PCT_TARGET_BASES_2X, pctTargetBases2x);
-- Assert.assertEquals(metrics.MAX_TARGET_COVERAGE, maxTargetCoverage);
-- }
- }
-
+-
@Test
public void testCoverageHistogram() throws IOException {
-@@ -106,7 +61,7 @@ public class CollectHsMetricsTest extend
+
+@@ -148,7 +100,7 @@
final boolean clipOverlappingReads = true;
final int sampleSize = 10;
@@ -134,8 +139,8 @@ Description: Fix testng version and disable some tests.
final String[] args = new String[] {
--- a/src/test/java/picard/analysis/artifacts/TransitionTest.java
+++ b/src/test/java/picard/analysis/artifacts/TransitionTest.java
-@@ -34,13 +34,4 @@ public class TransitionTest {
- return new Object[][] {{Character.MIN_VALUE}, {Transition.Base.A.base - 1}, {'Z'}, {Character.MAX_VALUE}};
+@@ -39,13 +39,4 @@
+ };
}
- @Test(dataProvider = "badBases", expectedExceptions = IllegalArgumentException.class)
@@ -150,7 +155,7 @@ Description: Fix testng version and disable some tests.
}
--- a/src/test/java/picard/util/IntervalListToolsTest.java
+++ b/src/test/java/picard/util/IntervalListToolsTest.java
-@@ -105,12 +105,6 @@ public class IntervalListToolsTest exten
+@@ -127,16 +127,6 @@
};
}
@@ -159,13 +164,17 @@ Description: Fix testng version and disable some tests.
- final IntervalList il = tester(action);
- Assert.assertEquals(il.getBaseCount(), bases, "unexpected number of bases found.");
- Assert.assertEquals(il.getIntervals().size(), intervals, "unexpected number of intervals found.");
+-
+- Assert.assertEquals(testerCountOutput(action, IntervalListTools.Output.BASES), bases, "unexpected number of bases written to count_output file.");
+- Assert.assertEquals(testerCountOutput(action, IntervalListTools.Output.INTERVALS), intervals, "unexpected number of intervals written to count_output file.");
- }
-
+-
@DataProvider
public Object[][] actionAndTotalBasesWithInvertData() {
+ final long totalBasesInDict = IntervalList.fromFile(secondInput).getHeader().getSequenceDictionary().getReferenceLength();
--- a/src/test/java/picard/analysis/TheoreticalSensitivityTest.java
+++ b/src/test/java/picard/analysis/TheoreticalSensitivityTest.java
-@@ -351,10 +351,11 @@ public class TheoreticalSensitivityTest
+@@ -351,10 +351,11 @@
};
}
=====================================
debian/patches/50-tests-exclude-Barclay.patch
=====================================
@@ -9,36 +9,34 @@ Description: There used to be a tes suite error with BarclayParser
--- a/src/test/java/picard/cmdline/PicardCommandLineTest.java
+++ b/src/test/java/picard/cmdline/PicardCommandLineTest.java
-@@ -37,30 +37,9 @@ public class PicardCommandLineTest {
+@@ -43,23 +43,6 @@
//
// NOTE that it does NOT test that those tools actually work correctly, only that they aren't immediately
// rejected with a CommandLineParserInternalException from the Barclay parser.
- @Test
- public void testLaunchAllCommandLineProgramsWithBarclayParser() {
-- PicardCommandLine.processAllCommandLinePrograms(
-- Collections.singletonList("picard"),
-- (Class<CommandLineProgram> clazz, CommandLineProgramProperties clProperties) -> {
-- // Check for missing annotations
-- if (null != clProperties) {
-- try {
-- final Object commandLineProgram = clazz.newInstance();
-- try {
-- new CommandLineArgumentParser(commandLineProgram);
-- } catch (CommandLineException.CommandLineParserInternalException e) {
-- throw new RuntimeException("Barclay command line parser internal exception parsing class: " + clazz.getName(), e);
-- }
-- } catch (IllegalAccessException | InstantiationException e) {
-- throw new RuntimeException("Failure instantiating command line program: " + clazz.getName(), e);
-- }
-- }
+- allCLPS.forEach((Class<CommandLineProgram> clazz, CommandLineProgramProperties clProperties) -> {
+- // Check for missing annotations
+- Assert.assertNotNull(clProperties);
+- try {
+- final Object commandLineProgram = clazz.newInstance();
+- try {
+- new CommandLineArgumentParser(commandLineProgram);
+- } catch (CommandLineException.CommandLineParserInternalException e) {
+- throw new RuntimeException("Barclay command line parser internal exception parsing class: " + clazz.getName(), e);
- }
-- );
+- } catch (IllegalAccessException | InstantiationException e) {
+- throw new RuntimeException("Failure instantiating command line program: " + clazz.getName(), e);
+- }
+- });
- }
@Test
public void testPrintUsage() {
- Assert.assertEquals(new PicardCommandLine().instanceMain(new String[]{"-h"}), 1);
+@@ -81,4 +64,4 @@
+ });
}
+
-}
\ No newline at end of file
+}
=====================================
debian/patches/fix_unreadabletests.patch
=====================================
@@ -7,7 +7,7 @@ Author: Olivier Sallou <osallou at debian.org>
Forwarded: no
--- a/src/test/java/picard/illumina/ExtractIlluminaBarcodesTest.java
+++ b/src/test/java/picard/illumina/ExtractIlluminaBarcodesTest.java
-@@ -155,33 +155,6 @@
+@@ -159,33 +159,6 @@
Assert.assertEquals(metricsFile.getMetrics().get(12).PERFECT_MATCHES, 1);
}
@@ -43,7 +43,7 @@ Forwarded: no
* 4 cases tested:
--- a/src/test/java/picard/util/LiftoverVcfTest.java
+++ b/src/test/java/picard/util/LiftoverVcfTest.java
-@@ -1187,30 +1187,4 @@
+@@ -1335,30 +1335,4 @@
Assert.assertEquals(runPicardCommandLine(args), 1);
}
=====================================
debian/patches/remove_google_nio.patch
=====================================
@@ -4,33 +4,44 @@ Description: Remove Google cloud storage
--- a/src/main/java/picard/nio/GoogleStorageUtils.java
+++ b/src/main/java/picard/nio/GoogleStorageUtils.java
-@@ -24,12 +24,6 @@
-
+@@ -25,14 +25,6 @@
package picard.nio;
--import com.google.cloud.http.HttpTransportOptions;
+
-import com.google.cloud.storage.StorageOptions;
-import com.google.cloud.storage.contrib.nio.CloudStorageConfiguration;
-import com.google.cloud.storage.contrib.nio.CloudStorageFileSystemProvider;
+-import shaded.cloud_nio.com.google.api.client.util.Strings;
-import shaded.cloud_nio.com.google.api.gax.retrying.RetrySettings;
+-import shaded.cloud_nio.com.google.cloud.http.HttpTransportOptions;
-import shaded.cloud_nio.org.threeten.bp.Duration;
-
-
+-
/**
-@@ -49,33 +43,6 @@
+ * This class serves as a connection to google's implementation of nio support for GCS housed files.
+ *
+@@ -50,42 +42,6 @@
class GoogleStorageUtils {
public static void initialize() {
-- CloudStorageFileSystemProvider.setDefaultCloudStorageConfiguration(GoogleStorageUtils.getCloudStorageConfiguration(20));
+- // requester pays support is currently not configured
+- CloudStorageFileSystemProvider.setDefaultCloudStorageConfiguration(GoogleStorageUtils.getCloudStorageConfiguration(20, null));
- CloudStorageFileSystemProvider.setStorageOptions(GoogleStorageUtils.setGenerousTimeouts(StorageOptions.newBuilder()).build());
}
- /** The config we want to use. **/
-- private static CloudStorageConfiguration getCloudStorageConfiguration(int maxReopens) {
-- return CloudStorageConfiguration.builder()
+- private static CloudStorageConfiguration getCloudStorageConfiguration(int maxReopens, String requesterProject) {
+- CloudStorageConfiguration.Builder builder = CloudStorageConfiguration.builder()
- // if the channel errors out, re-open up to this many times
-- .maxChannelReopens(maxReopens)
-- .build();
+- .maxChannelReopens(maxReopens);
+- if (!Strings.isNullOrEmpty(requesterProject)) {
+- // enable requester pays and indicate who pays
+- builder = builder.autoDetectRequesterPays(true).userProject(requesterProject);
+- }
+-
+- // this causes the gcs filesystem to treat files that end in a / as a directory
+- // true is the default but this protects against future changes in behavior
+- builder.usePseudoDirectories(true);
+- return builder.build();
- }
-
- private static StorageOptions.Builder setGenerousTimeouts(StorageOptions.Builder builder) {
=====================================
debian/patches/series
=====================================
@@ -4,6 +4,5 @@
40-tests-fix-testng.patch
50-tests-exclude-Barclay.patch
remove_google_nio.patch
-fix_javadoc_ascii.patch
fix_unreadabletests.patch
60-tests-exclude-network.patch
View it on GitLab: https://salsa.debian.org/med-team/picard-tools/-/compare/f3cd76471f4839b07d6117e8c583dcb0dca6e8ed...44ca3a8617a0d04c16876db768e305f4f086354f
--
View it on GitLab: https://salsa.debian.org/med-team/picard-tools/-/compare/f3cd76471f4839b07d6117e8c583dcb0dca6e8ed...44ca3a8617a0d04c16876db768e305f4f086354f
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20200602/212304e3/attachment-0001.html>
More information about the debian-med-commit
mailing list