[med-svn] [Git][med-team/plink1-9][master] 3 commits: Bump changelogs
Dylan Aïssi
gitlab at salsa.debian.org
Mon Jul 6 14:47:04 BST 2020
Dylan Aïssi pushed to branch master at Debian Med / plink1.9
Commits:
679cf2e6 by Dylan Aïssi at 2020-07-06T15:44:54+02:00
Bump changelogs
- - - - -
3dad9748 by Dylan Aïssi at 2020-07-06T15:46:21+02:00
New upstream version 1.90~b6.18-200616
- - - - -
cec30f5a by Dylan Aïssi at 2020-07-06T15:46:25+02:00
Update upstream source from tag 'upstream/1.90_b6.18-200616'
Update to upstream version '1.90~b6.18-200616'
with Debian dir e792101a7142d69e42546ecfcb7b0eb0f28c8f0a
- - - - -
5 changed files:
- debian/changelog
- debian/upstream.docs/upstream.changelog
- plink.c
- plink_data.c
- plink_misc.c
Changes:
=====================================
debian/changelog
=====================================
@@ -1,3 +1,9 @@
+plink1.9 (1.90~b6.18-200616-1) UNRELEASED; urgency=medium
+
+ * New upstream release.
+
+ -- Dylan Aïssi <daissi at debian.org> Mon, 06 Jul 2020 15:43:05 +0200
+
plink1.9 (1.90~b6.17-200428-1) unstable; urgency=medium
* New upstream release.
=====================================
debian/upstream.docs/upstream.changelog
=====================================
@@ -1,6 +1,8 @@
# Copy/Paste from https://www.cog-genomics.org/plink/1.9/
-28 Apr 2020: Fixed --allow-extra-chr + --autosome-num/--cow/... segfault that could occur when the chromosome-set flag prohibited X/Y/XY/MT but that chromosome code was in the dataset anyway; an appropriate error message is now printed instead. --ld-window-r2 can now be used with --r.
+16 Jun 2020: Fixed --het bug that caused the wrong variants to be skipped when chrM was present but not at the end of the file.
+
+28 Apr: Fixed --allow-extra-chr + --autosome-num/--cow/... segfault that could occur when the chromosome-set flag prohibited X/Y/XY/MT but that chromosome code was in the dataset anyway; an appropriate error message is now printed instead. --ld-window-r2 can now be used with --r.
19 Feb: --bcf now parses header line IDX fields; previously, if e.g. a FILTER key and an INFO key were identical in a BCFv2.2 file, --bcf may have computed the wrong string index for FORMAT:GT, in which case import would fail.
=====================================
plink.c
=====================================
@@ -93,7 +93,7 @@
static const char ver_str[] =
#ifdef STABLE_BUILD
- "PLINK v1.90b6.17"
+ "PLINK v1.90b6.18"
#else
"PLINK v1.90p"
#endif
@@ -105,7 +105,7 @@ static const char ver_str[] =
#else
" 32-bit"
#endif
- " (28 Apr 2020)";
+ " (16 Jun 2020)";
static const char ver_str2[] =
// include leading space if day < 10, so character length stays the same
""
=====================================
plink_data.c
=====================================
@@ -15067,7 +15067,7 @@ int32_t report_non_biallelics(char* outname, char* outname_end, Ll_str* non_bial
if (fclose_null(&outfile)) {
goto report_non_biallelics_ret_WRITE_FAIL;
}
- LOGERRPRINTF("Error: %" PRIuPTR " variant%s with 3+ alleles present.\n* If you believe this is due to strand inconsistency, try --flip with\n %s.\n (Warning: if this seems to work, strand errors involving SNPs with A/T or C/G\n alleles probably remain in your data. If LD between nearby SNPs is high,\n --flip-scan should detect them.)\n* If you are dealing with genuine multiallelic variants, we recommend exporting\n that subset of the data to VCF (via e.g. '--recode vcf'), merging with\n another tool/script, and then importing the result; PLINK is not yet suited\n to handling them.\n", nbmarker_ct, (nbmarker_ct == 1)? "" : "s", outname);
+ LOGERRPRINTF("Error: %" PRIuPTR " variant%s with 3+ alleles present.\n* If you believe this is due to strand inconsistency, try --flip with\n %s.\n (Warning: if this seems to work, strand errors involving SNPs with A/T or C/G\n alleles probably remain in your data. If LD between nearby SNPs is high,\n --flip-scan should detect them.)\n* If you are dealing with genuine multiallelic variants, we recommend exporting\n that subset of the data to VCF (via e.g. '--recode vcf'), merging with\n another tool/script, and then importing the result; PLINK is not yet suited\n to handling them.\nSee https://www.cog-genomics.org/plink/1.9/data#merge3 for more discussion.\n", nbmarker_ct, (nbmarker_ct == 1)? "" : "s", outname);
while (0) {
report_non_biallelics_ret_NOMEM:
retval = RET_NOMEM;
@@ -16452,6 +16452,24 @@ int32_t merge_datasets(char* bedname, char* bimname, char* famname, char* outnam
if (retval) {
goto merge_datasets_ret_1;
}
+ if ((!mlpos) && (ullxx != cur_marker_ct)) {
+ // Update (2 May 2020): PLINK 1.07 errored out if the first input fileset
+ // had two variants with the same ID. However, it did *not* do so if
+ // this was true of later filesets, so in cases like
+ // https://github.com/chrchang/plink-ng/issues/140
+ // where one but not all filesets had a duplicate ID, it would behave in
+ // an asymmetric manner.
+ // There are valid reasons for permitting duplicate IDs in the first
+ // fileset (e.g. there are redundant loci for quality control purposes),
+ // so we don't want to copy PLINK 1.07's error-out behavior. However,
+ // there are also common dangers (e.g. there are a whole bunch of
+ // variants with ID=. which should be assigned distinct IDs before
+ // merge), so printing a warning where there previously was an error is
+ // justified.
+ // (Obvious todo for PLINK 2.0: also print this warning if the first
+ // fileset doesn't have a duplicate ID, but a later fileset does.)
+ logerrprint("Warning: First fileset to be merged contains duplicate variant ID(s). Variants\nwith matching IDs are all merged together; if this is not what you want (e.g.\nyou have a bunch of novel variants, all with ID \".\"), assign distinct IDs to\nthem (with e.g. --set-missing-var-ids) before rerunning this merge.\n");
+ }
if (!merge_list) {
if (!mlpos) {
uii = ullxx;
@@ -16460,8 +16478,6 @@ int32_t merge_datasets(char* bedname, char* bimname, char* famname, char* outnam
LOGPRINTFWW("%u marker%s to be merged from %s.\n", cur_marker_ct, (cur_marker_ct == 1)? "" : "s", mergelist_bim[1]);
// bugfix: don't underflow when a single file has duplicate IDs (e.g.
// '.').
- // Merging should fail anyway in that case, but we should not embarrass
- // ourselves by printing inaccurate numbers here.
uii = ullxx - uii;
LOGPRINTF("Of these, %u %s new, while %u %s present in the base dataset.\n", uii, (uii == 1)? "is" : "are", cur_marker_ct - uii, (cur_marker_ct - uii == 1)? "is" : "are");
}
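
A minimal standalone C sketch of the check behind this new warning (hypothetical variant IDs, not PLINK's actual merge_datasets bookkeeping, which compares the cumulative unique-ID count ullxx against cur_marker_ct): the warning fires exactly when the first fileset contains fewer unique IDs than variant lines.

/* Sketch only: count unique variant IDs and warn when duplicates exist.
 * The sample IDs below are hypothetical; two variants share the
 * placeholder ID ".", as in the warning's example. */
#include <stdio.h>
#include <string.h>

static size_t count_unique(const char** ids, size_t n) {
  size_t unique = 0;
  for (size_t i = 0; i < n; ++i) {
    size_t j = 0;
    while (j < i && strcmp(ids[j], ids[i]) != 0) {
      ++j;
    }
    if (j == i) {
      ++unique;  // first occurrence of this ID
    }
  }
  return unique;
}

int main(void) {
  const char* bim_ids[] = {"rs1", ".", "rs2", "."};
  const size_t marker_ct = sizeof(bim_ids) / sizeof(bim_ids[0]);
  if (count_unique(bim_ids, marker_ct) != marker_ct) {
    fputs("Warning: first fileset to be merged contains duplicate variant ID(s).\n"
          "Variants with matching IDs are all merged together.\n", stderr);
  }
  return 0;
}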
=====================================
plink_misc.c
=====================================
@@ -3725,6 +3725,7 @@ int32_t het_report(FILE* bedfile, uintptr_t bed_offset, char* outname, char* out
double nei_sum = 0.0;
uint32_t chrom_fo_idx = 0xffffffffU; // deliberate overflow
uint32_t chrom_end = 0;
+ int32_t mt_code = chrom_info_ptr->xymt_codes[MT_OFFSET];
int32_t retval = 0;
Pigz_state ps;
uintptr_t* loadbuf_raw;
@@ -3799,9 +3800,17 @@ int32_t het_report(FILE* bedfile, uintptr_t bed_offset, char* outname, char* out
}
if (marker_uidx >= chrom_end) {
do {
+ uint32_t chrom_idx;
do {
chrom_fo_idx++;
- } while (is_set(chrom_info_ptr->haploid_mask, chrom_info_ptr->chrom_file_order[chrom_fo_idx]));
+ // bugfix (16 Jun 2020): forgot to separately exclude chrM here.
+ // Fortunately, this frequently didn't matter, since chrM is usually
+ // sorted last: in that case, if no alternate contigs are present,
+ // the marker_ct loop termination condition prevents chrM from being
+ // included. But chrM is positioned first in some files, in which
+ // case this fix matters.
+ chrom_idx = chrom_info_ptr->chrom_file_order[chrom_fo_idx];
+ } while (is_set(chrom_info_ptr->haploid_mask, chrom_idx) || (chrom_idx == (uint32_t)mt_code));
chrom_end = chrom_info_ptr->chrom_fo_vidx_start[chrom_fo_idx + 1];
marker_uidx = next_unset(marker_exclude, chrom_info_ptr->chrom_fo_vidx_start[chrom_fo_idx], chrom_end);
} while (marker_uidx >= chrom_end);
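
A minimal standalone C sketch of the corrected skip loop (hypothetical chromosome codes and haploid mask, not PLINK's chrom_info_t layout): with chrM sorted first in the file, the old haploid-mask-only test would have admitted it into the --het computation, while the added mt_code comparison skips it.

/* Sketch only: advance past any chromosome that is haploid *or* is chrM.
 * Chromosome codes, file order, and the haploid mask below are hypothetical. */
#include <stdio.h>
#include <stdint.h>

#define IS_SET(mask, bit) (((mask) >> (bit)) & 1u)

int main(void) {
  // File order: chrM first (code 26), then chr1, chrX (code 23), chr2.
  const uint32_t chrom_file_order[] = {26, 1, 23, 2};
  const uint32_t haploid_mask = 1u << 23;  // pretend only chrX is haploid
  const int32_t mt_code = 26;

  uint32_t chrom_fo_idx = UINT32_MAX;  // deliberate overflow, as in het_report()
  uint32_t chrom_idx;
  do {
    chrom_fo_idx++;
    chrom_idx = chrom_file_order[chrom_fo_idx];
    // the old loop tested only the haploid mask, so a leading chrM slipped through
  } while (IS_SET(haploid_mask, chrom_idx) || (chrom_idx == (uint32_t)mt_code));

  printf("first chromosome included in --het: chr%u\n", chrom_idx);
  return 0;
}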
View it on GitLab: https://salsa.debian.org/med-team/plink1-9/-/compare/bdc534a0bb02b3d84bd43ac1c573753d418e80dc...cec30f5a652bdcad362287bd6ce60318feceecfe