From c8b1c477641007efb4d2f6e97891bd33fc125301 Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Sat, 18 May 2013 11:01:06 -0400 Subject: [PATCH 01/99] Updating gsalib for R-3.0 compatibility * add package namespace that exports all the visible objects * list gsalib dependencies in the package requirements [fixes #49987933] --- .../sting/utils/R/gsalib/DESCRIPTION | 5 ++++- .../sting/utils/R/gsalib/NAMESPACE | 1 + .../sting/utils/R/gsalib/data/tearsheetdrop.jpg | Bin 50343 -> 0 bytes 3 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE delete mode 100755 public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION index 6116e8c66..ecf76a95b 100644 --- a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION +++ b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION @@ -3,8 +3,11 @@ Type: Package Title: Utility functions Version: 1.0 Date: 2010-10-02 +Imports: gplots, ggplot2, png Author: Kiran Garimella -Maintainer: Kiran Garimella +Maintainer: Mauricio Carneiro +BugReports: http://gatkforums.broadinstitute.org Description: Utility functions for GATK NGS analyses License: BSD LazyLoad: yes +NeedsCompilation: no diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE new file mode 100644 index 000000000..0bfe475b4 --- /dev/null +++ b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE @@ -0,0 +1 @@ +exportPattern("^[^\\.]") \ No newline at end of file diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg deleted file mode 100755 index 
c9d480fa05f4acf066e3bf1cf469db47b8a1afc3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 50343 zcmdSAcT`kOw>Nmm8I+tsL86kgK!Zw_tbjx%2gx}$K|r!(1q2k33@SP2B$65=$3{eQ zw?NZC(|pJ0eeb>Bnl*RLH+RiHQ_bn~qq@@GRl92MT~*if*XzIyO;rt500##LPzOK2 zH5^Y*F_Vy{1;pQ3lIOrel`w&^Ui}<#Ld>p4FGV7 zK`diy=WPRGB@o-&dfT{y_-{FyuCDH2`M4(_=Ckqe0`uarf|&a+U%)pQ+||JBSTH{A zzv5ni?PvhG!1%w#@n``6H5&jl;rv&+WBPx1g+u?%+g@DkpW<-Hy#Rm!eSLilrX^|x zfGe-->&y4o*H^`09SZ=^?P=rd<@ZlokPw_P0Qi6U|6er!t8@UE0%-ulKYjFnkN?G& ze^QA6Kv6dUJP8MY$Kd-N_~vB*fSbVppa+2QN&v37_Joyg{JOWgJ$KZz(Fa*Q^ z2|xsp1;D2LtAD{%z>O*ZD3lSARCzimN)`S$=6?@9b^-tJ?V-CzpqG<_qYtMtIJ)ib za%#BQ2;Jor6A={!t~Y>il>jGu0MOP31i;+H009m)C^>(*4e}d@8I+@cab6RL<+(O*{NI_6oi1!~Dc=b2!^%C$9z{SD&`}gmF zM}YS?5aQ$G5fBj)5fKp*5)u)Ukq{A+5)%@VP>_(4k&%;=6Om9*Qjk-EknC?Ef3xG^ z{>@83ModUd_WwFucLO(xfmZ~DcsREK+?zOfH*v1}0S=IQ0&oWUI|=KhuHnp;|X`}zk4hlWSK!KP<^ z%+Ad(EN;Lze{TKS{=Kt{{BwMAiaJA|U;L#D2f+ImS^r7d|AnrbAYHg%AK(-Hr3(kw zAAIm`;uG8wC8So?Cwk#Yb6YHenD#;Hhc7)OT;c`@I%}^fQhIKQjXTJ{r2RwL{~2MC z|0Bx&ld%7yYY`yF!vQ-F?y%^ zC12}S58UFGI^;7<@420k3QLY+`wa{Rb{ENT$x}JYozgawgv%0!->88T$Y#(azEP2& zDBaU)!cQ>;h?jEk97SSvoytMP-cBv#%>q&Bs4e3t-_ivTmOe*IAF_J=hnD?x##Q01}Dmn$0^iu=1<*v%)mh z(BA-;oBGMlW20{Fi|Ika0E`pyL`akpj%$kyE$_I3(-T%^<*W%>K-{JpIAHANhVhTE z&)nJy_ASIAxz*(PATOVUwm3P%hw(^>P<)(#sfTGjSbwgpr28?>umlk8e}n&fVV-Ry zf1xYH)S{U?XcACeo@7zrTd99O?N1r3hnqe;#`1N(_$A$h;c5m&zkeaYK@KHjFw9G1 z$eB{}SNM%YX^A41&rVXCIXK(0drHKLFY-=GSqo|3F18$Ls@pOBoCGQ~)=x`bMC}vY z!b_a`tcOdjt8-HQC%-gaPlR9Eb?cCGEKT^VB@lq?*v~I-Voy`{!AKFw`4gSdtsK~ygJdyskP>O$Eo*RTzBAfBq(`v(` zwb_D)OV!SX(>RffYjN4N=CDPv`s1d55(QV^pVpD0Gt=)Pn;L+$Qd+>3)1i@%Nbmus zXixd{q>wQBZodzXQ_h%1= z(bVhV<}<97@Uny+$E!Z~)Y?kE2226HmL2Wq-aUJoH07lpa-@LMgQ0311$2=t0FO#v z3vm)nQ)G}&yCOv1DbjRalClIQtEP>_JcPpeUuGbkH71P z!^nyVf@Rzdnr>zzkOnIc2l6p`fceE~;ZIcYreZzN1~x)E&q3m#eVeiHXAW>9F+wvy ze`HU|Qf?Dbq7bkh59lmUKUrr-mlFKEm^R7VoDFA9%0uu|;Z5q%&=L%7DpA_*Bxfw# za9~aT6+^_8f6}HQgZT~M_kJ^s*`;v*ZF%e2^lI)NynMa2q%z!?V;e_%SzwBkleHceZUb5(bK)HVE%p|Uj<48%jo7(L+ 
zO*(-`s;WA&h4^HdMKYTmd@pk>J(X|_l!kG0`q*cR!-KXFAEkb7mKWmQmV-qLMoh0p zMT~Kt2NdGvc`xRLIUet5b5Z12d}t*HdrtR7g?4GPolV`eBy5NMc7g%G&V5#+l9Bx0 z>1jOmlZ`UGHu5|2KBZEIiTN%$un;}e!PeODY4xSq+rvp(&R-F~rH-^?uGD=jR>b3b zezXhU`*4rxTd1dnj5&J{DJ?FgEZgs!eabd>`twbbL-#(wLU^)9TCJI4t#PV-;<%m< z7b)xn|2&ZZe%zKpRYV^|Xcc7BmINoOsCeG>xP^TqL%3+369oRcu*2!;>O+W_gMMA}cr;ThlG{^L-1Nob!&0jHvG%UWroe2i$V zz!}i%j+#HcfH~kvRQ}n{r*d{750d~|mR0Xh0{0U2nwhpC(L(T8_IdzUs=!(Q%zIB; zAqaLz0A&W8WMBHzZj6#X5TTTtYKx=*BsB>0`|y@0ZAb73!#%u}h)4g>{3w^9iW)AZ ziFYI8Ko%0+Y<7UX5#^nZ7ax)-S&Ctg3CEWmclmY=xV?X<59_inNXRx-88~RHc3odF zHRFG|k_P9G&MRT&(S0lPoLCT)ywit$>=Xe<7+R46^z+&zwLEAqSv7Qr{Ks^palfI} zs@1!(B)`5H=n6Cy^HcgD8tH2ih17z}a79wZxd&VW!}ZWU>k#v6;A1J0;~F3}Tf^Sh zLFDIt3JH5OEi7DhV#Ugl_U+=&)0iPkNFT*a)q@_7pu78z+&q6}$YwUX(*+nBBBbMl z>qA|1+hGl^Qua-4b#2if@)uQPY<-@Sz6oZ=h9@yoy9tqARdrQzOr%v?@@r@}tR5Yn z-iM-J>LwJNJi&5Z0~=bw*T9hDH4xK}Zn*|jbm0&zZBrM zYX9W_Fk5H$RXAil9>NH9hkV@=F`>dolUg&it5rW3&L&x2=;>QN>!bLBP=3QMm`it; zo-nu4LRIt-hAkN0=~ZJ%E-QbrTp~2mwG&j=laA^^w{b;V#arE6c)N}R*MezW16>WM ziGAa10RP8T1H`4j?uRfY5J|Jq^~%j-^9hpSwL3iDaROGrD#%{U~vdO_Z=2X& zyP#h(?{uIe+G|_yz6lvm!aPDI2lt)(%%N8O<%1be1b@iVH6VCiU>Y`Qg)qGaQVKZo1F|irFgHJqL^RRtHQ>hRPI9C`MZZleR40SC&b@zzgwIY8{ZGK=T6i3nQrxM1!{N= zeD&w}I@AXa@5#!NYhZ`_={jkZzqfIDdC*Qh^sH#bO4);x(&jH6B|k-^&&gdIbo|r zu-r(JAy2J3XPTKD;PEAl@NQAgQ$|ZCU=Y|*qTC4F(1-PYb2gKO_?u65&dDo*R0xwI1(iwQ(k^1u5rE z;x^&M>!}7xEFbVz^Ys)05#toXci%gafXs;&7NQKx0G3gE4}{Ee^rPX~v_Rf^nw;OK zC4*p_RN=F>cH-+2f#R5u-Cxu&3Q&m|`6mB>HBkRXxS&1330@dXCh0#Y6%M6TRdN8d;Hpp^gAVWYCEv(Z7(@Zv<7c8;Ie< zk&0oH^VVJJzA*GqmO142bA1q$u0&E0q%@G>tyx4i#>v>t#)q>ktl;#4O$A`(=cp#CE&G(0 zwcQ~bFkDDr`4$qCj_0OkYVN%lF#)Lt@D49w(wZcj)p$7ofbY;DgmW~;3Ixa-Ja2kT zRSod%%EB{}L!D}HKM&kZCZ3Q0-VQqH+5YiX572p)uFQxxM+-zZ(nM%d;qkl)UWZ)+ z#1p1~3>W8#1Wp_G)bggxfWR9yc|AN`;CMCzxZ%(NcrQkZt^cyZrIZI20CQkjZI4VeO#^-~${kEdJ74QSF=!|5y%e8;$+ zvu;a>Z8F}@QT5zYqOv)*VfFoxYS-dBedBbw-@SgHF62}gdG0^@lB3+sYXSvX^GJzd~}yOY$ZgO;0H_zVXEbh z?=>Lvj6D2PKvEn&F4ar{J!HLtg9LjcY$~UqS8D;IUyf0kzBA1w$DZ=R+?hd|Kdfp; 
z!uk%^NSvV$?8}pcAY89qVj>vFK)81SEHm5<=d3mzD|ij`*oIMu@FIueed<189wSxL z*I9->BmLtrlIxc%#!84PWVn4Y3PmeWP5s7}m}B(!JZJnN)R6qcEHTTdad(W{T3S`iqk_##`8hFBo9mGBT8^b+CPI3D z^coS(|e;+%Yt#Y&{9W7qj*X2v(%c7Lo^?oW0D398K9Pv@H6Xp_Dp7 zdPH^5fC~d&ordGm17)yQIdt&lJ&e8;-b8W81LRSX^>oaa&<_C|ebRELzf!Grezdlm zsLpe7v%0wsG2x{a5L}f*)*BafV;Zi3g*p?oaVffP5KDlrs~Wi~4J<~>of!mIK8oLA zAD_7$!~Dvh{u)qdB#)2(Zp=DK^jmh_VU2c8geGcQrZy*x2|I(OMmY2zEgBmmg$J~C zww2HhP#eWMyQ-a02b=Y`xjM-0yhiCHUuY_0v;Scn#Me9kpFIC{$hptO~K^pg3HTVy_F8LAiqIBNu zx2{bI%RRZ(Yd}vu(NlUMr1zGFNABD7dbbJ5{p>JBDEiJ1m}SV*tHLm0jB6KiFG;W$ zOIM)W*!9D`#^&m0tZx0ZYt}Q9TLcT8D(1G4s!EEd&HADCy<#{$OnZwhY!7vE293lb zRfF>2M))=u5NUeGT+q? zzmgZ>zZ6~cj7IEX$z%~$@S{Ww)5ngK{s{`FYk>Ob#dHV@oZ}{%!vIzfp7{5`=Ey8uzZ-n)?sj+|5gzIeF8qzFd39Fvj z=h-iz(z?FqyV{F;S@({G9Pnpz$~A>V8^1c0_`S>RhsC$0sZDQ2kMsLsx=$zQK%uKe z^YLQDYcX_q{rNOVf@>iDbb@jPqcAP(Lq!$2)w~ZEpcn*!NB@bOTRY;0Lr;&z!f@DkpQ}dxdIjTp-Oyg_3~}4+SP#O`fRI zy-O@3)WB`^roe-)xK;rZcmSQjhCtLa+3zfCY7g6 zQ*z6vaPj1KdzMs)U@WQNx^TiZz;H5wtMv)AoqF;G=MJ{pr|yQBZMdw^?tNDJu$4Bo zoyeVgGm3WDLsmeq+-UHA|v({)NkH~j!-f3DCKBdmdcw=hv zZ0SMUcDi%))2F?dPQ5LcMY{C8xI15vwT`r$GwdR+u@4fdsR+{p0^sO9o4!s>5DkTws z8H{kVc%UJ#g>94>4YZ}>_2BffmQj-|QHGaK=IyA8WE6i8GP!(|XXtvzKa{yOd%@M& zpqB95Y|g6tOp?hkhmlUtJ7v!bKOhAajr1QP6UN&EogUabPQu6lEnfs^`jz$#e*N;p z%_qZ3`clnkre@+WE@C}3QVUmQoBO%eEuN|n=Bp}6UA%zRHT}tU6(m>x8r8iJr~_hX zfY~9%ratQ$cr+`N#clk#v+2$!6QP=W2M!vEub;jDkQ(_*x40n21U;3JY0?@8@n5ZT z)?uwMs!PwwxIK5cCG$Z18JLvDhi9)q0> zBSGFqpACNA(MNl^lcBA8vFy3eIvS!+X0tAQ;_<(!{E{cgQRNUXGf$64t)T-MdY?h< zgwYlxwdT?hx4UAs+FM*SZa2dYi(8Us`h4*tOc^xn^ftm572S;S&!45lE*#=)EcT9j z)G||2C)&=u5ArN0z9*UfYpZcwKP@4os@Zx+6MI@6jCud=@SsN)673uiHn{c4`0?xG z9V>--Xpc6Q+62QCHI#M@^g1ZMt8!UGB%|F%7Fh$sC1HE92Vi$|?Im7mO7v$W!Fy#*#e8-&c* zTgq~ADTZ-H)qUq-|0K{!TI|%bsNGXz?8^`;QJ<3#%rD8{bf5kUZSl;L%!LC-%J1oE zGCMlU#yhK>Mu<5Kz4-cC+$of*D;r6rgD6dCAoi)o8#GOcl4Q}PaA7bQwuzT<|M`k< zbv@djG!{RGlUnX*kpoMrg4jlRpuaFgxECTz<1yxURVzb_(Q19hIj7?(wmJl3@WrA) z(KJf&|rq* zkZs||ee)SVU&Pj{@QXm(YnfBeRp#xtj~^lG9W|@fHTBb8@gDvW&~98d(2)C*FpGpz 
zYYZY;?fCET?00Q|KAH<>C3Ts-_+P$PipSsY9gA_GJS{ECp*g1uSvDD{|ULXW3LIWibH2hY0=)I^CebSyns5ws1Gc~b{5 z(Z9kqcnwgU*!`%%a8;od|EyX5;o*@H`uKHGyPy5}{d}>W2coXc+apCi&klU~@6tUg zf;jNN=kCsAuv*L}i)GjdZ@Kc?PZjmUtejk-sr!$mkdtS$mF#fc$8&?wy4Zs?$idS~ z5$u?v2-50xyA|T7A1jLVZA=QpSELIL=3ugD$g5)sNbE~s;1Fp^OzosDL^v1hBf)9O z$ejj#hDysG>|{eWCT&nf%~4w|E-wc>IQ+iggjiDg)93StoUAd>^igzUo2oQUy?w{B zZgdNZ4gS2`TPJ!3X0WytjQ?@j;Qe;Rmb=y}%t*{+S2uGF`lki;SspaLh(U*|A{Ta+ zoeHgwSnBB%oD~xO2$T0O-^tO*m|CnG;l4-^SRzvXhHGw{p9%Iv9WRDf_X!$QatUzJ zn}{K!NqPO!0GA$)g|7KIV@sx%y5#QAEaSG|%WM`d4NDQ!{^g@Dkt5CCmXvr=buWjA z;)h^`SDD1JBW=}O2DCqmR`oO08(Rgtc#FT#Ztx9sSe;8>a9H4_giL3Ok?iH1$M#tv zjD@3Cd67cz@V(IAM*El#3nkw34*GX8b~GXy%l*th4p4qVzE0jYsQCHHTE@mo$2Qz0 zN_qn`!I^RE-HTFa5rq=TZhiXaWQ{3IR=xzCk|`^nvd5Y5L?w|F|uk?t6}?G52q zl2=xtO)00sBxr9vS3whGP40%-XcxJYF<%e2Mg#F=&CR!haxqV3Mt(Y!Ull^vi@Qj> z?7LWZ@>!9DB^4`sL8z!}fP^y1sc0wN*=x^(DT_9F>^Zr8X2Kz95ewb&n+o-lYhcFm zHY%g6sRO1?&2<{Z83CTp<%IpX z1`v9#vJjgCJX9Xq0?u+^+-*fAZ%_$0lZZ>>xbb_;|0*}MQHn{JOub2OShS#SGxJT< zK+;CY=Dl7uj`-du(Ij4rUJ3dkBh9-%I)3IAt;$taYbFJGXWzBWRh(Yv*~F8erlXRd zq4)p1o8@6{_APyB`SvL7>!mCeu3Hgo%J{4^m#6|$g!ZFxNG3~g;>A=Vr3y|RES#1# z@mN)9FGqV`hL(PDL4Fc9Vn}y9S>tGcm-`f+L5)*LAZx!XI0RY$(4R8TyQ#O0ePrQs z4XE4KK3^pn>Ih`O&{9fdqiE6J@4OkH3E+B0x#9nk<3%nn8*+~BM+B%9lwj|i=QSUl zId^SxBoCxs z_!kF6&i|=fun-;q=fd2VdW|5TU{`JQp2hdKEJTT`!<2>Ot&0pGb0*(7zq>;8J%i-OHB8NW05_ zbNqR0CVJG9Z1!&BDwc}$MPo}YFh5;cG zq3?yJRzMBgZ1m3`Qo-3SJC1o9cmCQd+8Xf`*}h(e+S9T>cw#X zEmfulyh|SoYUVwi5JHG86_N5LbTJ*Slj8U0#hSME!%IO`V?0Iu_wmaX(jSMmX!2a$ zjzlIz0$qMq3QiQ;R27V3_L=-t62H$W$Skti-Mifh-SYk;yuQ}$&WxR0y{k>uy-GLN z{;X1wq!G4X+~U|HvvN!Ck<0FPCAP7!xUf+O!ews2Do)V7EJP0}lz`aQ4H2I=ji_{f zyZiLjOU@JpX%gEZ-_r{+cnXG9`cg4$D4)iiIBdd-V$QO${#!pP%can9=}U_ETg-WG zV%pRP_oL$+v@{A-8Y9tI7_{ES+-zX;4uU>aO6d!tn5QSh?;6m&J10A2u8DI}%WhJ2 zBz_I}7nk+5pJuO2Pu0HLS0hW^f*#RX1fpV6FE20!!&f?pSPZoqN)RkF$Psl>+s_{& zTekM7ZT1>~6E={=4ewHq#4GbEFnsenNMGBviZNacD@RQCHOXm&?-(=N)i?mgfFkFj)R)ZRd+=p)_g6pJd>R(FGaQ5B+kSa(}!Zxf?oCyQ_uL*+xA> 
zyA)i|`E*@z$9F!A|5LFLD)&?ne#gMTJv6gTux>vl2mONGiuIsN?m!J%O)eG$db7u=WKf0F$~|mOxQ8!;^p?W{ z3JG|*_LTLL<>IBb%K7Hk zp!9g7!64s4K+(9=)UdnAjVW}8WovjZ9-#UzQ!==Pm&`Akc`_yp;H-wzBnB}*2!ee* zq@A9jJONrZn_%I~`68mT(M(M8)9oFj=t*3HajFu&m)o)!zHu2;u^Ui`CxdEA|+yj$jS0{(dMPk#p|gPhSP54mVNrdmFy%P z>3O8NKqOlTlpQQ4+MdBv1DHw@NI#SnRh)UDOkxL_zpgY=8!ny35O+1b#nUE>+V7-{cJ;5T!BwO$H1K0Z++@Lr5 zc3OWVgxO38uR5~~N<~damX5b)r@iICt6L#oRPT*`m3Sx@yRpXhb8-iUu8~@A=7cw{^CpDwpKRmTjDH zjol;PmtvpID}nAv99FgccORZDgUf>FHQ@4IN6WE5uO879>G~nB_ULxJnaf}*&OTG& ztJF2?3*)7!JQ4Pp0+1<_L_gQ|2`;@bLf}jZ`rmpWotz;Y=z+?4M=Qp5hH7*gg;noG zKU@8g%)n*i*kEWJ*e)%IO6@8+nDQ5B10`)HG>=T zEZ`0`{T~-*XhInI&Z16gdEZN94?GY%QEo<-U~fkl&_0Fo&A_d49fy6xQDnPRf%ZJ= zWWs>X?nttbJ80#N_pYa1uT52ABkQfGf4_M)hN>l#dLLRk_FJfT|bJ=$#Sa9EcK;~V`Dklvevtw_69``TyELVjt~)%|uk#l8B1Td&v- zennQON%mtW*Erg5xg$1F(&#l`#G!Bm2YKuu*^1>K!AgmGt^2{bS|_pk?a6!@N^bKc zU%Lxd%k5>y!pfb4>}qPhl&q9Az#hRLQ4g|mB`4yr-rO@VH!Fcjczsk)`C($qQ6_({M&`8~(= z5L|+|xi9fbvffqXph;6tKmiphK^|6c;KEp+_DMby)lpYRJq}c9LsuV7wWz- zW}xw?B3l);8sIv+d-u^Df#SErtAy)N?K9+QnIwhcnn_*O;_fO-LSRN!s%8XCDJaIU zhr<-Wq$0ADrZqobRbmxF&4pv~+*x?dkeZ${}@xRo|7| z{rWXYxane)v(4Sl18ieGp`RYsAKZa@`5eftsZGNh8)8^o0vKL;^hZ!jvSq3Pp#p-s zR8FWM^F|MoXAfP(#Yf0JDu!;^WZf-%o)&a5Ni<@G&>r?#y9w=4hPuDkk*O{TH5_1P zPb%qF3=!Nwzhh9CJ4?HXMb^fhx|1(~{!4_tT5Qch>IavWFUhuwsmDEvZp%Yj2sjPX2b5Dsi@r#Bl|1-LX9atAH#DPU4L=@~Dk*nHvK`7}! 
zZGiZIqQcZ^){(;GyV7H-8tdj*qSp1Z+68U1hbj81>m;H*10gSvby$)jxYk zca+G^i`E5AQHRi46URnc@eIGDzDOyrwEdRRrBlOu{~T!& zAAh6NfX2;y_KZ2RfZO4ot4rCpxsF6p0iBGDr%Pr|7Ykv$V{2bxV`{tB-&)mAyaIPM zN4pr%YN-6FlVNLQRwcPh@TY(@mKR$)U%wS?)#vaGwA;Q(%%y#PpMw{HfV=Eaf7U9LuiB~n;ZBc&b4uA&NqeT50})YzyDUi%UCAbw3?g` zlh{~nUa)6Mexd2hQ(h7G;A^Szf-Il6K|X1Tp{=O z6L6`KQ51$wtzgVR_zz9m=~$8mq}MMZ4TwuYf-p;DRdwmJ8{G(H0+(;yNDj&kWY}t_ z;;WrYc?D@o^QYCZdAvmd4oEMF^sak{=Rc-lTGX3%sv0Y8t87oZa=)MZgxDj-z>`>A zC^IBi*rXyadZEN*_hL1-JRt3Ge0-u$PW#|j>RqEOzX=CdT8LA=YER+_S2!1u+eTX~ zS`CrjUr=|#TzlZ;6kHLoq>eU2+6?$z483w_N;V7SeUPh{B{+JZpuXIc`sFz1ak=B1 z)2$namj+a59^rxe_laGtuc{RJRb1Tb2S}Za$XFk-CRH%t5v9g|hOFmtFic=p)&{yT z3=2@CPd9cm9-W7? zw3mH5Yn_$5e`nSW)nxb$%M-AqfzG~k!-OFzqoD2;zjL?T8> zF1|Km(+t+hdDH}JSGKloa`d%eUe};DCk(#D)9<=uC#r5^yKRV)K))zLx53Ag5Qn4i z(%3Nm0N!3jp_wo)vNf_D!xhEa3Jw2z{S?)^Q{vCmxW&ewalVSptcF|D=~V{Vonfet z;H3%G+L=cOX2=KFRdcuaZOzYFQWurC=%#ic2+fp)EU7OQ*tr7wd$S^R)Cxj<&JmMT z*%6kF!f{{a(3%@pGa%Th12jpBO*lG?Qri=vVw+10P~<-E#v1{${%Eev#i zeq(%B%B1giR*kp<$q6>I8tw;Mn1D{uy~^ua{|LHcsRlWi1)qiRBRXndhd@HBHCR|6 zq*iNYkFuthX^BQPA|;!>aBIS>2?8e)6&3m}A3=MR6!|gkXu(elA-agtUia|Dg4oi| zhPXqYHCj+>Y1o&0x3KjWIw`db!WTQa|`GPs@_!C|=0mtYTy5rM)$@_u>U!?uw_?-n-QKv@H!Kg`rG9XlNc+@+YxqBUfD1Qrk zAN0g7R=djJ(~eGk#b^#0#n|{&@rJMdy(i~^>0T{Y_6Dx{PST+p!3+1!+uJxO!lti* z`qflqI*J;hSO#LnFPkeyi z?K$yr^LdM&@|a&ExU4>g#v6H>t1%o{Ayv8C{G5JBR1(mLdwgFP{;um4G+hJgPjA;~ zYcVMh)Ms>w^n$qt(Yq28>9oG^k<4@TQ@RtT0(wM@Q&f4DE)(&_uZm9}D|$6_N=PfS z({UJup1pwQdSuJn9v$7elW{BIRmN3%Ym-Swx`jhn$3%)7M#~j`guc@?v|`=&xP9uS zWXj@WtHlM2#fIvEc7r{+C$GOg=$6oJ$jIQN<7H&5ZSGos-}*|51EB|95S=j5uU^Bzc2 z=ij(mK}C3Cf^7GuhsgJGWn*vuMgj8*!(Ppat9RaJyNw+`&W}okJ4*QA{NKcUEVK5g zj-bJpYZ1q1efVe>2!`6bPXwb2dh+H}5S<0cK9j7Wflh`N`ZEs7BV(hkOR0*(hs1@d zeu-byo{A@it8@K;*nkHg^Y5bHT^e9i(Y_xsPZ4Ja3hTcWnO4lzHb)h3XO|2n4_DoV zDA-*ed?yyd{1BE}I`CbBO1mf({)pQd8BJAmc7tl=Re_RL8f}z|$ZpP&Kz)QqggZ zB$X?N zhOV#02ZcpcFBRHlYjwqHcR1bqqzY8Uf4#+tdLBIIuz9k#YXkm*aC5UXRqgO#gyHf>Df;!PzkuQ!F*%i}9wm%KI5)G>&6p+4YENWLe9 
zEvu#DUWaMaAAQfM)6Bo6!mDzH#9CVYIxi3in_g?0=vjx}m|sti%-|C)1{PjE=_6Islx#ZQKv<~UfRlY!!FLSmE0QC+Q#7nQj1sL2 zy?lVtL-I6K&O3!M7p(tE*hr#OaM$UG*f4JJaiPs$y#MTlklP)f^B2qJ=@YxrM?+Q= zdr%t5i6}Yu*SrvFpEc4XyUk!HsITiSDdh4yRN%{yWxI!NM~TK6O&Dqicr5CB(G_m#|5NWS`RJuTQmDOG&G!|eJ&-%n$ z&Lh#zsMsL-QKo6l`=uuciJ6Z@+C0S_#V20nwjZ68;f~PJER@-h-JbN?^zgYMD&Xp? zB1_mr7ee*GYCSftBuWe2SNNNGE~*C0u1#B8wQJh&J)57^(@Y@yy_+RmR(`mThX*1} z*tWL7u|EC*QG_L>x|Bg%4d|&!$I0Kkl)TD$!NWSgG8F8DAD;+m{;EZ<_LO(FD!e9N-LSh#BREOQ-x4q33C;Tn@3<>l9qD1rDL$vmWP2`uaK8*yRFh6a0&Ujq&t zi`m_42yg;fq(NnX8z2K$nwx#>yhw-s`?U;?x}G)?)$mY(UyCcQHlIa{LQ6Isw_f+Z zv$IKbkUt08etu6?qHa(s;S>tI``G2Kx< z|KH(=h0!mf3;H09?#Z|7+TJRBbG{H;86g^toJ{qofb8W|P-=`jul0ap&P8$7*CE); zPCpkWfi4<^QpiK##O>N;0S4E=HGGEoEhYGl$H|8OCmThK`{gnxjK?Y_bKjo?Y} z94#;1W*0(l75Y3S$qL2gVs-IQG8i{$Y{>Z!c)-%2F02Q_9QiYS-PQ3q`Sul|^KSPw z&<-BJrN&H+CSL=7l~{&z@$)9M6;k~1^j)*QPT|kZvmv+q`BSr2GTl!CO#rtli)O0u zHn{uB_X<2$yJVBj$gZnodBIES-%=bmI|YV(HxFn`e4&H^VZE!r2HBt@LvmV`!*V$sqFXd^lcMzm*Z*EkBBl5*Atim7d!FG-#qpj^hz7_5KCs#k)=E0xB+=?A-p-!_x>T=VSmH7ETFG1C@Am( z9M3`~8t@dt^tljCqNQr@Ga+lHAC6wA>3NVRoc3`2rx6C!qiiohyF?y9in-qQZ+Jw#H%h)XrC zrRDA&at456HB5;t`n?#fKZG1qBv#j=uYoVZu?S$KBzK(J=ylj^>ZCWF6og5o2V@z-l?% zD9xQZ-){4v@yGU^ZqLG;1iUX2e0J`eQd*w2{%KRxIx+V9Gk;<7OV(WA;oiuTs-~Qj zwy?Z!dy8h3U%TENLF$_@tg&Uk6=|*T!^SM}rmzeFTDlM29m>oXl;%83Z3uGk+(Cm4 z_qc8@$_g|zvC`l zi4;z#jlkK2`Rus0SDn52_$F%QHQsDjX~9Cy3@`k)?xoNTM=(WF{JJ|Cy(y>z0wA8(j6y0nB|59T&x?t9gJInq!Inv_R-8Mn7dp zqC`Y|HJivyW2gZTXMwUiEv%AENX*F$F%@3HzzefOTW(Z4k1ZBtWll;(mzBmChnU6y z(&9)bzeI=C$ahR9#RfXwO8=wff;Q=xlAqSi?Y`l8PtO1#?iX|-@gZ7J$aqg(Rm}># zN%+e4Vp0S^i2nB`&M)m>{1# z?0*sVrr}Wj;oJBidyJj3j8I6(zMGIOSt9!~l?rJ{2pKaXWDi9VqwLwT%Qp5Up{!ZL z43T{%8h6dobANyT<9{5_@w|ARm%K2y&;6NeIj{3PFD1}1A~?oRy#A;r;S{TSy#C3@ zc8q&VI^gLEyxAM{-NtZ09+ttq{7SlR`EEWKx^hu17s9!={JcvKEua>h#JIkx4k`Tc zz(#G)qNACa`2jZ|Y1rM>aMF@}I+a)XY={gD){wNalJLojNWo zHH(rL`0p*Ru2ZhHRuHoEY$B74y5Zx|5C+4e{h*_b81()9F(uB5kwTVVwg&RwAisC4 zPoz+;^@x#jA!=FMTvD5JOp`fpu#b&Df*N6yfUVgVCt*9*F7`GG0{~ 
zDpN0pu;K)ww#^_+dt(rhMPRamY0IMM;ikCnknKH)&LY-LfaUL$<<5{B>+%4^0Q+R( zimC`{;WgF8uwyETdoY@jP3Lzp=S0c*QdK`D)bdk1$k0h6g9?+Ap(W=pji^U&=7NfB zu>$B7e^hL>65Q>7J|Du}`S{v7k+rE@ZLT+yv~AEb1d5hdX(7FhhERfyQ%B!~7Bbtj zPG;qQ{9Pn&P*MDQts0_pDX>}r&Q$QC=eW+CbmQ;gCFrpC&^=@D0SXIueXkE%LpVCa zi=u6-CGWS`nChn{Y|Am^b%qa@oLZYqEvxY}#823Ps{fX<;47-a8Do;qTgKAqO^8YL zx{!gD^=BC2YnQmbHo9uCx9~n2QruM-0EsuXDP$N&2ZpG16#?^KX2sq&s+h(R}X}NMU%#Q;3a{E!H-vmm4xT^^%+7tW=g*beLIuAzusFS)%0iB z)ieH3pxx-(x4&Ae*g*sr{VSZk5zNRY4#{rBp``n_rb8CL%{;f|Oo?%fz9aqC2~u%_ zhfU*G7?pR^tSO5iyqEO=iC6nhMb9BO9!9_Rc30GYuk}9m!W&F;)I9{C^~elMXPY3J zM5+naP4n;tM{8JhvwnfF;xC z*){esj-7F3X`jaZW+oRz-Y!hYrF0RUpBsq>b|1uG8!@~26vuFHlN%GN>n zK(Hoh6rX)fNap|s{auSE-%g)t(mc)$UHbXdNagmSoeF1zSj?r`Jn9|cT^dMtbRhfA z;O@{A`u{d3f+X4VoPnpp$EVz8Ya4|M!tX{#>FOveNw61V&PTY9*z=f0HzSY`bPDw` z`0v(I6#;gVIfD428NoxnSpjH@?CU#fJ=H%ORkIL&MTcj&x_kmDDkD><+;9PpzEv3P z+@*@?xlU)JauL(}YmRVBFO>leGv{rY;-e#3bwqu9u*u}XTe zJ&C#jD1v-1XxvQbI)|R|lB2!pFbO4TxspP=p6eSFdDbQ$EzVjhYKWdvzJD%v zFQ4IQ4`$*LR~yWJd_8wFt*KHGKIqgu`?oW5qSVvt z^0}C$2)q38`l@>jmp-wbWP`v8Vf6`A0ljW)(@IK*13pS(Xs|$llpcF4e)BrXmbWMH*z0VS;-1x43DMyd%6gt;5jo*T+wdZodUIGfAI zLsTkmpezjl{Yn1!a;BrewF#;a6W)Axiu67(9xW-MHm8qn)9Ctm=WTdkUyj=tz4FB5m1MGS zbw=__ihf+;Lb>Mt%~qc$)0kB}X1?|sQO5AhR1I1>JkO}<*%(1#PrB#y%V|TlYYN6I zi&yJzjl>qAQnMABqR)RC11rS3oIbtUcm&aN>b6 z-k^+qntCd-JQ)jJ(afpd0yQL>uPc8#-&4KOQQzvAE!;Ca+{gWNb?p~*aT~#c@dNc| z2trsAUEO~LLS-f#%*Xc81tw8ZEIDOf^X}WvpVgF2?w3p>ed4Fb1d??>q3SoJo;*9r zI)@;dPe6Xc_`2ZqFIYj84vCZCJdlSFT=HE|U)GsBiqg9}sqd~4*HO8#6*O-Ic`a$_ z^u0_a?qQ*{w!JUo3RMU}t|gi8{#eC5{mcDiO%ERDh8cj1x&uC*B-)aSB`of~+o`h| zUp}9_X04o4clO17+t+CqPX!smK%_(r&4BU({0Qk!|3So2-V_t+1=6|tTuf>RPGq~y zlj8F23OFX_w>G4UtV)ld_yh7r6CmT1$XY$h~+H3WH5D2EX4+wh)ERv&0nkB&{ zD0AF&NcN0FUyz82%XiCrf-}|B-*;Zli%8C=sMw)7Ny~{8jqosX4Im#yVF`a~MDlkd zjlV)!76aV$Uz^75#ElKDgG%>(&1PcpZHc5mviGtXo--YZf)`Q{gAeS6CH_5rdZ!+U zTN8KasWT5ERG`k_74{xY^^sZd^)E8*0k4q446XGhyGpA~}jP z+}l|DDq-JpRs(lMNeV8LbV)ybz^Q~*FE$M>Jk+=2iweg7gGlrhecx#T+3&LPN=H%VSDTx8h5hiY 
zrqT~ybExh+_aSdNM(-ivf$8zp6c*TmlmD12T1Y8VqbTHp$amVS(H~4?)c$u|it`{{ zj8ix=HZ({R?#m1hbR)XdVJXf=nTrnJBQ$p1;a~Ad0%3<@ zaWd~>TnN!{uYV9fC?J>K7dS5qvi=j4c02smprSy7Ez;SVSR0Gd3$69f`c}cPzdZZh zGKu-V@~a<4_v1!bPvwA|ci59=Y`x$=$l$!bZ7A$Xqi29J*N(LzRs+OO6MhSSOq#E* z-8*+Zd)8vTK$T!BdM)->F+UNp$DM;~NT&+P_7|v;a6@BZe{K9@@s>E$tz4Uz zp{3hgciRJwUMqzvSgy6a&=Ec(FzBIabC4xrO_~5@JtSu;KZYDj8q6RBKEv@I!IQAU zD2a1mhZd5|r8+fG?O*Y&=PW(n&+p`UWQmi@=W4z`rF=`jELgXWd!PVVt@J9V;Uv^; z6SAFFc&G+d;a!siN^cnY95ITXS_TYqh8%*}c)^B-Fh?Ew_}ve~n;9v;aE&;sP#@m5 z8=6>fj@a(E!l&;+Kf#L?-bV8i^OqY+!!PL`z!tPrl!l+#zJ3{2pR&%#CdNt?AmH=J zWZJVDZPzRUOEq6xifYl%mNPGSzwoe_qnsyjbq9Mb#8Emsc6DXJryC}AaO*2t5}SyY zBB{Fx{;10Iq#D&vU+H{9=G|2YQG)!^tl2BMNHOze+8*;KPZFJ}8UfhaMe6n0P}oT; z;@!g`BJgm?%C)iD{A${^Ed85RZI=(*Wh>Uu=LPRRP&L5YG6wJyMmo-hfnXgE^eM_G z*r#khe9)F{MK=CsR~!3}dFu)hYRMb*7%j-l)R~!Ag7;Ciq+1qcwD(^A7)TGXo)i16 zwm*y)Fcw)$MQPVY@h2N+ypvMD*9{T#?2F?cvM8*R8sE61$9!$IM9Y!go_$29rsdlS+{ce0W1jqnp};SUcG zGg2R%e=Yp-kGy_24f4lxFSY-oUB4I-J&bM9>ccFST_y>Y5vN=}&hF|fR48SYTWO@s z-c`st5&VjL^*#ROUB?^O1s{Cs7L@9;oyPb=Sus6^AWHB7D^7JKRf430rzU+kVSPn> zDbe}a!QQ%V2&j+_79W)6;u|KIM7OX(m;`;c>%!NgLlG9*f#hk(Uw`sDTDv}!A_pW~B9a>d2{^v+ zp@9cW9zW@)h&8dn)%T8xJuTy}Z?u^F7|fefwHWSL<&RCX8KrZy69esN*`Q%bAjQ02 zs6ZS*JW@Xp{D$xib8;Jx3++04(Zgjt@qDY=(6=-j_JyFsIjTT1x$z?aG7Ond5hr3a zCK9_OXn7s5P$J#a4VI_|JQ_cAoZUBEwnqxh{{5>X*=(9Bm7nFUr{ckVeXb9!OWH^d zrf^|;EJA!cTnPt!+inBnu>^75!$F>8&#?Z}Hz_CmD_-fp!hBu7RjTQdaXF9&5_Abx ziQvMl)_`>pS`HORT#30qH{`@L#BYVf!+AmBZRZE#aq`(z*VpQM#xDI~r>@_15^)w; z82cFfmb6Ldz!Do)p@T@S!th&__i)&dA3MGDzA$*s*@-QnW_j{Ej5V{5 zHFpBUO(O%O{~NhQ=#zYp4JFb7#lzoDU1?1i&b!L${84Jz0;G%LR27C#|8%GXfu*Ov0N3Vg2@sxH>3)a5p1y$HV3vz`A3 zm%chs{;#(u_UXDAZy@&ve(8yV7G2{7!fz;LS- z7%n;^_0}wku?^mjlK?7vXJ_ZSr0a>cv4OwAcEjvOt1;0s>TJgyspTXOGfoSKjrqn{ zFJ}sPwR$d15ezdZr-8-Qmel2)Zj1!0mbeO^<| z%PJz(6%G_p?wh_xgA$EK;W$(TX&W>D7|!LN-G}AFf#+i0Ax)@_g*P97?9ZHDp}(oksT9ojpO3R&hxsOq zeA%!>j}cx&DQ@m^pZ#?1fKkb^2oYtx4^&kev&qldfT;4?lus%4{^wG@o$ zD_3vXeb)@pdsgeXn!mSnp;!3V2;bYKZ*~v^HNw!|#3~{M$6zQ3to7pfsMl8W6}*6w 
zV!+L|;U{+}XTHfE@?;x#k^h6YyT`+&Iv&x(5NXMnP_! zh9^?bk^%o>vI+knACO(ruIAxblD1~-j}@8t>#AqumYo>%tR%9???`367?8Aq z+2gGZcPD3%V7sv-x2fxg9VYm6;raP{bO|7)((hEu%!S6CIG@)_cCSX=ak(6t-mIe0 za2vD)>eAOP1%Tg_WS$Tz8ilw-5=_y}ejUYdPGQvVxusY|!GqyXW)U0BBI|^XFgklD zVL}+?PK!Yc0JVc&JPn|eSLh0~Cp7Y%>%6E!q*U;Sp@4`uPeu!A{j%3BO_5?rH}y#e zmmq^G7MH{;9z4(%U}`r|yTo`~qTjABNZ^(1LYnhWIfy=w{QLw{U2%7CE6Ic@B@M){ z7^(BawKBA>P$6(;c5@;nOg=sm!|cyjeql7T*+JsH33t8`>zAb8^_r9<(gYxP7rHA* zssi)CpSIpTYG0gGm3Woih}RYUzC#p4Uf5k`tKDBL*!ZQ-#d5 z9a|xo%3GH#qdEGcA9dLU=NhI)5ER|!Y^U}D0&5RSTom`J34V85GoCqKHa*e6cL{Pb z)4!Tl5TJh(IuNCU;u5v38J9^P2~FMeX_33S^QZy6zVPSf=dQ5*r#F-FMjB+BCqDQ+ zXb)Yci}vE(WT2sn`{m94_yxZj4KAg{VML==Y<4L0?1y08j-jaSbDHP_RosDgipPiA z?76Oq+qnL}N)t7+U`&2;^Fz=;P2|F2^&%K%w<$Gw%ZqjZLm6OBXWJntz$K`crUR*_ zq+bhsf6b%S4Xi=82E7esXsu^2`VTAorty#nUkZF=tL-0TPlwn}=d3RB1sje^kdDrQ zLQYC+wh00qxd>MOt3y(1NcE?#pND=7-*Er&XI<|W?A7tH+A_jgV>)v*_(ICBjw;vh zBcnpHnU>?0<(0Xm)~6@QzshMXmNf-ebZZc@k-x*Hx69sqNRod}6M7y?w5?6Teh5s#)pwl%0wNVG*X6yd+%}?=9hf>m*Jm3RR5Pcf_;u&!zHWlY zNg8>cdRH6Tr$agCokSzuUEKU?WW3}4dmH@SklS;+vQ(aleq1Ab023E>8rEM8H6bB( zJIUr&mAGfDygiKE+0gTK1!sp8Brld53t0^J{2d}Y0iG0n0M;h~pZN#zwpPGH6VbA6 zmgo7Ssy(hMzIr3*`YhXbFWr=9H_2E9my!suDh3ZAPK6qBP?rHhCRu2Z;ElZA?S@UOUO3 zY(5<~(N_x#Pww5qeU7w>#HtS7{hc{^>C14`Jl9>os|KOvEMK#_GM`6aM|kg= zB6wjTraWgME%P5lc|dv@?rRH3ChTbt8OmOxPU;7i8exxCIF?e)KZAqzMb3wqgBkQ5 zdh}7|)=GnNU%UUSJ1rkuh=X}zbrx|FnnD?jRb|hOPVo5uS|Nky5nJ97fxq5T50tEm z^A9xdJdH9pecd?vIV>zOc0f8(`31aohqc#M&>h&so5yR6y8K95ah*g4zO%fd2yV5qsumF=dOL(m4cf#=f@VV%^&a3y%J`L`C|1N=ch;CTYgs;XCdF<<+B1Jo@m4R1 z*vz`btzTOv(09zbV>2W+$%mnZK#-g2-{1V+vE@pO`0L<;>MLgj{Yztgdy2Wk9w+IX zR1M!U*H40oD(+pelnfauhNP9I2ud5URzsMdt-hg5wk+*fJd9E?5&;!y3{>gZD~1P? zS?`FJ>PqegUy>=MHV33l-LbHmbTu?M1t(_b#m7jkdJEZRacE^+s;*5a>Hx*ZY}Gs8G-@QBV>UQRgvX7?%$o^yBmq2SsEV zZ?3gm2)0@*9Mb|Xh=5q3!I(Uy^!Y>cx4-vHNJH>A$cZ~fM@63twTuQ;ETe-{K$)X` z`0T}@ZDiX|ZLvU(;n5mt z#{ycnb4BO(+g9;%NKiq(JmS=F=^B%4n0j&0TnU6v<%Zm#@QxX`&ekQ#V|9q+S)0e? 
z<0Ac-$VY{1b0t!5vZgm@qlHh2O@jWjFWv-=oxpmeKZe+tubJHDG1#&dtE2gj1iXS@ zfjIOWjGZ|p^}Lx&{*-|=L@B&@F@31|Tx9To9QYS_VBWJY2Oo!kk5Tz+h|Zo*OkSJV zJD`YcAq1Nc3!8+MxTYv?`SEO^#+~{FWv%O)@lMkqY6*IEzfayGqn`}=2^VmMV|EX~a#|Aj9}a+*#>^^i26a}ePhB;9$ei@( zwNmo?a5uPoDWBKkI76CdqJ-&lA!7l2J)1-z2u1=*vfb8q*ZQVBas5$DHl%_Q|5Cu& zZnlHBVvB637@7DkUPpcB^;?%s>nUm2tBY5=z85}cAWP(xow~jJ4{|}eqHH@iXF>Yc zyFcErC$)KK6|lfIe@ zvbos(3xAGX1P<)sh%Shy2zJyp!j3%BPzH##1R^3d^T7!aFM#QBORFdZyfy^6)SYua z^xLUl=^puU@awPP)Bq&Po`F4mHkw6eJ?YQl+5uHiYjtyXEsRJPk940=&Ld=JC^g}Y z-fd}}F7p3S_~a$BsKqSPQbySBg{i%T3{fCs;|rJ)Bj_ymbkDB&wP6O3i2gSH!@inu zo6+vJr^}K=LT~xxSh?%7LnKFK#o?*uW-%o~ngG+D;=BP{@z7ZA4?|Yeed5I48Dc`; zvOe){g+v1$Bj{>g?lXL7UikyYPDkP%4c z?^WAQ7I#xD#bUp{Aezr&k^Wq9@7))N#4cWbVy``YUQ!&w6m3ws0SCD$MCZV#;J8vd zJg`T|YAxRY6|CO8%uf=kz`HL(q2cu|Gx=FI(`Qr6gI*dt#tRN=HUxN3z;1=0HvvlS zyYyxn7vr0zLvY;N!jBpXncrsX`H749q{Q1r92IhB*%8UuDlfN*NN>L|jjV{~Nh^+F z@&RrAf@;~4BlX8aZH+%5(}(2%y@=q&_+g{Fq=^%QFrEnl0tQ5d;CL6dgNMXyG4Hm? zmO=L}nc5BNc(=zr6BB??e!=T}_6|u_(FH;B|L^r(J?whMY-W%txj++ z`R5LO<89X1{a~yjp!ci&P3mVhy9W;#j+7)j8!{*CT3T>*Z;;vSYp&iwKR@-cl4@|5 zvuCYD-Zi@1OcP+skmRT<7vXrX4Pp4=JgkW$I$~m4K&Rco9MWSL?>$w(+jUmB&nY{| zckbbi+Riz%6hHqmnY*jYf#wo+HnHeOsF~nZC{-Y*2hk{-r0=l0cY1Ni@Fb9oALD8o zP3H=4>C1U6QJQ^qjL&mO?5B_~s#_R!znhN%w6cxzFZcVfBeW;*wNg5!O5`L@pvE2bZhZbGpO$Pel1Vme@+8ae>ZoA;$Zfr)6;f(<(;g5%eZB49Ze z-MxUcp&mmn9EpKA*4Zl@Z?)NyQtbZK>RFE;2WnCCxi8ZUYcvw&9ObXEpYIoZ_GtmK zqoKQI7QExP*d6v28XCV;zisqgPdv-r?s4&C6_QRONSBxif5K)|iT|(`|-@i9rO+JhJ-Qj$`)IxvIQL%?PTGEaM`|Ka&mQfWb zhv{G@wN1D+Of`ZWw{T?#V&+ekZ{8CW&whUPY&*R7^Mu zI^8*QYw4Y+f$KZXNy|1ZN15G+5aa$7+E?PJa~lcTr_8;AusIeE%k``}yfU=R!4Zk^%is;qVeEH6GE7PsVkFifC;&3m2L?Qyw`PCfl-}qlj zE?mI%RN5|tfjtxqea_EIiFmL(U*qbg+;6)J?2cC=BLdF4yB0PvOYEQ0>~=c4bRbnT z`Tde1?Pf+&tziZ-GWf*QqPw+0h2n{H5uc|8CG4j>D9+=!hJO&MAovJFxD+>^=&~<# z8LB?$gH?T}lSoTX)0Yn|#Pap<9lYF?xU;Bo`OLhknEdz>Q=0c67=3JOaojjn?~!H^m{cv6lpKzL@!; zwY$}%K01H;Q{1U4eYL=dl7Z3{z+u1(mgV{7^`zO&$oqAkMrWJ4PcrnWb`1=R<*v}c zRO9_0V*?!KEuU$g*0&B_3lsr%`n9|+fT!crUynAK# 
zmKr}`jNpK!=&@`s0+2o4T_L~{A15SsID%93q1cecJT7IEud*{uIR52SUD4!^XPTN) zh;NVEpJ$YkGF2J=1Gk-_Y}=T+^uZO;a8_GVM}Zb#RdOXaC%#FU4PjCh{+RCWoc;CS z=#R9k(Bc#6&uSv6GPl0jUt*opElu{RVnfgQAs->v`As+a>mzJ*Ro78f2Id;d^Y5&( zv=dMKU_7OnIuQKEr$*IQ;x`GpyG03W**jZ`9$Fx`6Vk;1Xg`K`_W(VIA3@2H9~o+O zB%4^}C9GWh&h_U0SkQ^#E2Gg2(z$!+3z(kUhP+gH5-usE=>Opc%p*C>1JC*T!3e^% z-Ip2MJ?4!2djB_PrRzRYu|~Ebiu}uxuf4nf#tWG{Hrm6r48S2+1zG`G6g^LXgUiPg zH@!lL&(dtb!*>l-qg*?T_##BT8?8cpa$V6g(Cnn*5C0WE?UXa< z(AP_bDE?Wf>kRQA@yPNh%G<*4Yw?rP=2wh-*Uja+pBtZskPGL%-|#PFICES-+*jBL z1kNk(MG?~H5qQ?P5L|+j+oQK6Tf$sY6$*+MoW9Z9Bo~w2PVzp0Zni<*7 zbyN<5b%`x*Aq;Qcmj~jNa9R~wDjZan=lce_UnXhhm9)g`TL+1(kkuP`-hQxr=G&jF z=@70L|9zU__|b#1G+c?~_mmhTaxEW#r67G*8iYae#DOpsF?B$dB?JnX-?^o)P-Yxj zZNKJ{D>+f-YT~mY`cbeyT4Yq-amhk#*12Z_kT2^~PVo{V?X4q-+2P*qz|B&_1_I(F zNOozR)Ywz~ddu(W#&{@~%>D93FV#;rEDu5R+r;_;(WqOT^VP^79WiQRVO)H#y&Yy8 zCnUulswHiyce;&-MI{SB)dHVK3xB@PkUiuO%@P%dI*Z2wN_inC9@FS8b^T!t#{Od%2jG}NRKti9cPclde zso!1*%`Wq6g=d8_T1()+>^(3uuSP!x^9ydZA$x@bRF9v=4k^cIITu`L?QCoLFr_pY z8yw+Z?7`Vu)SB-2G|peDVq+k&8U35!Wu+9=f=BE{6j5)`+EKE1V9997@O#KGeCTo) zmb(eelA5ni_vaSd^(Ohn2b1&6pW@R0w^K1jjm0&ctgZ$-R)0l5;c#H>l|{ zTyMg%ZW&5-Hhr8{@6`Rf(SOjAOPD{K$@1Im#uaQ((jTU)VE!UeS*EGTn@AY-0vs?7 zMf%fu7nPrl;YA0C)-!){hI!7_IFCw>QE*xkUJt|em=kPC$8-+xZnm$&1`q;(CCC*( ztA7MAVh@47Ys7@uz|d;7-ZKP#hH{VI67QS5e1g`ym6_c)R++YAOj3Ak175lv7`Jqo zynxUhP4Gkg*8GIczCGvw`t)>lQ#v#}vzN=^Dy~IlAOflHFJu2!fA994n{Ooiefcxq zpamGZH>Gk}xD6t|v?uD5OKy*=U4onmdNY|ZvM9J$#a?zFT2XZF* z&QmcV$FQYOc{&FiK;?hCLN0{n3)^*0h9Xr%KQ?qWzJ6OU>!{AyT9x&T?tcfUeWz*obYTK|lX|Q#r3Q{~kbDG3iXab@!zWC(hu*9kh>|gam zfqn9>lf;j@6XX0~!jmdUAXB zzG+MHKUFFYF;DSWAljCx z;yR8ys%CySw&+vHYv83WMY+;CnxJ4JckvYNneFF{2A>kMGJCaTN>xVz8j>(y*cY`7z~=9B}282&--e2G!}lJ@k0{{cJ|OEeYN zTEq6u(`D(ybd_eHZZ&6g33wEdt<2+FW46XA&3wpqs#iPBSaXkWGYDWBkPw=t@)AVH zq}`@!k5Zj&U-X6Ei3}hg3r%uY$OJG+LyZ|~ofWHNs?ERQ?Ja%R{59xuooE_$LRPIP zt;p4BKyz{C%CRj~)h%i<0tPCfLE5F*gRb+$On{SINHohK3HKf+Nn%}ErV6utyk56| zc+Tdtllk?}ZH8W*S{G6Rl~;l637$tPV7563Z?Nr84&?H((4P;X__@)0S@oY5jNf&y 
z)b#$=458E!a033ppy|&9FZB!wS%bq{hdC3MTW*ILVM;wm{DXuiErul}*0c?FCTjaW zy~y`yt}HSfb%DEd8XmMB$pwj43!1AIR~m&rwtj5se|)e6GBrSCKSm1u6;U6%w6p@8 zTN_y5>ar!{;*#$@K#IE(9Qpep>kWhN4gA+XzKx71(BUo0RCRnA&yg|B`)zKkb{FLB zvg)FofGgQ;hyeqB8{l=Z`BN>D;8bY%T&dN&S9iO2y`8*g{)|PH6{BZXJ~&eaGX-J? zg(UbskQ}$;RKg3EeBHXeyDv{mhw_OXUCvLZXpCOiR2x+v!ha-meK$b=ra#gx%DVHq zwDqK|3@)=28;mHK*qp_^htBIQs!=#b8>LUxgg{8nUY6&ZGNRSIivfA4Nq0Cdf9rl%zEFRY_L|}U70vAcN2zW z>N%)2b*A4F;oiiSM1XTIYswYCfviWv>N}Fu2Ip|Mh$~O;pC+%@oNOo%(v~_AJvk~Z z z)s^3iz0P6#oMunY`(BWJG*vTJU)_eMzRtEFm_I1K%^WHTD~|ZnN+{rVUQwR}Ad!<~ zn5IqeWHh%0+)eMsaqWcdd@}hR0OIn@NMMK5q%ThLb^!E;MOZ-u%?t%MU zjrTZ(GMZ|Zi2PLpeL>7ru+W0e^Pog~k7{q`o=;>>w8K z6gTx^g)Met~!PgG|qD6n9!Vh;7@1}YT0(5nmBJR({j|#cJ z#0YG>L5G6tw`5|+I3f&RKprGt(u+HB8?DX+Dh`q$#%9na(fY`wKkok}UIHR%g%xZh z0#8w`0Y{RmE#o~bccWG3U4MCXQAPVjcb8sV&Y9NKf^!^cg3q7i{CW97<*;Q}rxvu^ zXkMZ6>-8c|qqHa{u03ljyx$OBMtGVnxEf|l%fToti1E_hB<&y1w4Y!ivKQR7C2vK; zuPz=)*f5Lsh}44Z%j#1c(W%;s&e23M2nT;2(B(=`phm7L8Y^mf#CNOCd$M24J%u;F z8{QJ}u+r@FHpgKa?-Lu{PJq*_t(Rj|cESrcVNS!{^*I(hbi8ypCt8f^WtPQA%#%P z2_>~$dmV!TvZ59Rei*Gx&qhjt?26P zcWH;F8&Ya2X&?gS7<$rq!_0%_d?5_o^1szJ2w6*ldbjDok&6M|HvJm=y@!V-Mk^Xm>jy;H5YWkxa#5KEU#qn`A0#x($7<;sj9))TnsDUo%YQyMCtm`nyW)dB)*9>}aU8437)b?hS@( zOOvf^<`u#plD}zRK20*cn z%pn3)baEIffVgEP&U=LewlI@?48xW+<`|cec7i&OJCi@Wq!Y-*29zSs&a4h z#Crx6QA@j-W1+)L^DE#~^4q}`+bx6+h@xKUkR>%Ata8^9Y@e(MC{h^+w(NdkZ5bar zh_uoQKLV53Ez`K=5aobxsWYCcYOQZxc9UyQ? z;CCl`P}ho=dUI6t9~53cmtcIH3woVK8BgB6$vmu!rh-uhIxzhKz6(_g#N>fIpE)-oz&!UWrFb_n1BwF=! 
zoh3s1BDs_YmQ~wGWE{&{X!S(tqTkpfD?Ug|^R?6GV+0too{Y%mLUxQ-`ONR#ebiF7 zXTq>_cvL%b0LYu~!70{g&<4%HHX3)b5t_Cq8)InbzjG^NWOYww8F-}G6)$A*!|_&pY1s6M>w4_p2IIV;j!IUgQXBe@KIaOwzJ`FFjh0;prXp#k6pgXBFUV)it|T zFt=M@a@3q*&>jAu%YohX*DFz55pZT3HvfM~Wkn*aUH%|st}rnGPw(PQUM&Mf2R>1E zM=z1m`_o0axPSdUw2HNBX{>WsNKPz$dDomH;{uEEb(S-lX*O)E^OtXY`F{A#c^yX; z^d~>3m4Xg0XgDpO3NJx_nOi;=m`p#X%TgH>wpuO3;QP*9`*Zfs=FJndigEEgQFGZh ze>eB6|3SF?Dbn;w1Va;-)|v@utzx^TEWU*nCWYo7#2&S~@9FC0#hE_04upJG zoYs;b4g3Rv8;`Kk|A52i#s{|Il`nz0)fK*Q3Vwi|gI5;$9?pqqT|kDmM`&^hX|y5l?8vV37E);{sWP>B}Od^gZ^lcS|S{ zl7Q|B;nP~N>UOyqs>}VqbjR)wL^Rk#;DZmXqZo?j>9ZIH%xW2oLtm#Bb)^+t5+gh~ z`?!{zzd}7j))YRi_`Nfwen4=E{2@9~CPrE;y6~1b+o~{xQu)-4VM$I}np7y>srLq4 zr+YD~{Cn1t^QRS}>&l{*J1rh}19mFBwrArm@T6jhmVv(=KOb=0eO@|3A~f;kOZo(o zYv$|8s@4|0C#P!C>Fm~&yJdTSC=jYP@LJ}pRrPl6y#@bIPn<`m_eNrN)3b$%xCWG1!MgU& zYq)P@C6*m1>zmvm`E#!33uNrJJiy86qFQb~<3egTkIBNI61bM;{ zAQ30*fP^(grnRu1z-P{xF3Y1q_-`S|U=!bzyC$LwyeG?Uzn!kX+0kOmgCAzyt`r*@ z5N+YTd|P$>ArzcVf6p3#u<7XN+kfPo$_-Fk{9PihYsWA$(J6aAc-j#z5S~5NvwUh= zBg*(1b9|1!;*ZY>SS9T*?JcYCti?Yp=W^6{o&;v;Hgq{iVro$MK6vfCEU+r&!Rx zbji^^qV=g_0O@^rFt}DC*DLW{BBscX$u_|~W~*kAVw4liD^vcVayXJs%&6Y4$eYM`EUn0+p8Wp2l34f6lg5X)$WwhWq4-zLYc@ z`;cv1qj^${4>|xN2H^G_NC$KdVWP4#&_Z%1e3(@qgL}0Rf8*k-^)<@oP0pk(EF&7N zf(4B2=RbCvE*M_TobQIvl%&r+ubp^9LcGKhAK32sQ$Qwhk3kcLF}wwj85H$b=AxR; z#mDHwqwvwbI@=Pze?beLK4CJhfDWQ6c_-85-Hcr{P}1qXusOT$M8j}bg5BTh9B ze}}C{3w~JH{p!y8D&L~Q?I@Suh1^{>L(B2T{k&E!KgPqi5U;^*B#0FZl}4BzgD!S( zBrXhQPLK2}Ujv&KtywUf%i#!{jx?sT9TS^G7Pr#S@y7ixyaq}#w zD{ZQ>9|{VPN_8^DsL5u_Ek1u!0hWsk`AZBIJJ~yCHc`wdQBp%cripcM1yC?7Ax_|1 z`kxt!QpJg7vFdwaUemVm_e1sK+dt-RYwztX5Y#9m%pU`6rd?jGW->vd*26`p@~)V$ zVZlEJ;1_?vmYhYBr3&o~VCqNdUv#UyM8K*s4y>wN5v;y<>8@@Iv&a%-zqh7Rl6MEmC-3J{t`e{Rg?9&gu^*I$#9o^H^R(rk-E|h9&*!U-Ovz zq1NYR{V}KSJ!*SC9k5n+3%6x0{@$QYWnCykFic2r3EXq@Um9eF`@^4L8T8K(dw&9% zbmb&3?k=ot%ap7W@%lByU|wLDq`PZ9;oIe*phHne`8k|5{UTK(j}X>l9oF;LtFsa~ z2MW(J2Pd4qtP36I>eWBba({lYDm8}HBt?P?)p-X@Du+NH2{Qoh!$EdwQST9t(>kEU zAK{-a~ymOzXGmhv{yFhWW 
za`E~#+S|q268{j_ktAPTd-}4&H#H+(=^0^guN3KEFBrbZL=;}z05xVQ3qyTSagS{; z*-1r1MwA{gKfkoL{5zT^rB$y{b)16?>GH^08vJTr_a^%)A3U&e&$LQLS}&umwp!Sb zyUu;SDN@hsX0R^XMyt!qJE7u&c?UdDqlIX=H0G8qpD&D`Ziwko4-9Ev}=Oxji)7H6JspCr52vOA{EmFBNx6Ci8(_re)dFiDh_tc_B0jzPhxb44&g{K+Yw5 zpr*V?f0fG%U;rS@C?xpBwM8y~wHzeH6vtZKb^9~vTn37JoG=}bvE<~PS{1~bOk=chH5ST^u@KB z*Q|@T=haYG(INny2~aBjJD_GGoTJ^Fa_GcJK*^-FPR&IwveVO39L$C0)#0w2Y|XYe z1-aCoE}rQMc8olc)n*wV527yH`P(X=W`K412;($M%iN=2Loiw%bJ`^vO&aeS`2*Zt zT-{C6PrtA3{;bgSx*XOO4L1gw!DFC8GaWTh#&(>=;*05WBkI&aU1PFfYDFyvOJjAo zh4NCCzWnI!*Jo^w4fe`{_nAMmKRa4k>H!@rAs|^$fhO(Pfr>?ob|;?98tTSj&FH)O@Cu_c@8h!z4GBAoi#uX*zgCmZ z?XXGWlWvK?XqAU=k^YGssge2Idj9xSM#vKn?`lgx3f2nd6RtEa$*Hu0GP$dIm+I0l zJY|`nmY=+KjZGoJIgw3h6?w#(D&&2YnFtzV5xnV z|8XP0CwNNjs#ovqvRWs_#bu5;`gl0%z-lFGh2*qC-~DP%tc9Ti!Ex69&;ERpnM!1c(l=!Mbjmqw zu?GgCydmYfA(; zE@eSLk1Eg(V0-!eb5VrU{dTUKt_Z65{0i5r*?976zNsJeZyXTfP%P(s%`_OX{bs|{ zq#-k1d&HW$qEO(k%_J*s4HKpaj9=Xd!1X;3$Rmo&F_FFyWw(hW3_!t9*u@X$@}k|r zX5RG^En800J$^RX*L=L@AX$0mV+A{FoOaHlYhNxY1`*l_qc_up+yE1*H1~stzR1I4 znVZLcW?h+m{=3R~w)o)d=|Jl$xvbl$rVr2nlPoMJ-hs6260e}yRsqB4(A$CicL^7b ztDEW@o?}BnFP8wc!7-m4Y@dGkrD$`sS(*3yI)d&aP4z1wc2cy_#`&+VuRUQ5V2dMN z&Yv#qzpG4bBZRgGa;MPT3Vzm&0@{>fS3n&8&O+|byN25i9M!79 zJY9$5#Qk{N(W0?sg>Pdmj>LNReTF}6RBQS;1_mZI9W#UcoF9i_fnHfaF%kGwvC7nD ziw|B&Q^!L6E$VAtis&{*Z^wRgzxBj)`oUKQ#)dXXwvEP*!l!PzksT=C#xSx_USEd@ za2dt)EI$|ryf>0`3tm7!{&jcy1ogs?vzphfhV-Bz+#>to#Zh%}WEZ(VOH(t;QupPp z8u%T>lX+7NXS zDM{b=8`^DB?Ppu{?{JgwpAK14**Z$LB;3i5z^gF)%u^@Vy(xXQ3F>h#PNCxV*8>*r zvh`nazNyqE%K2FHOaKlzKee)PNZ(XQ4siMsuq+D}<)PMd6px0nhYL6SmQ$u&uI}uL zSZuqs@4ay@%0Z}_j*`q7vi;*pY|h=uD9DHAu%V6epm8fV*rs)zw>i+$y

Wi2kB9vvK-Kn(WvvTD}Y_TktzT ziFUB2mCX%?crd!Qe|C}H>*4ehGh%#P z)Z}^?jbCD39imTltPAwiL`GgQpBK2XX^&|~)vI*k{%Hf>W*><4w5s}0MBu$DEWQ}& zn5(4Lf^;s+UMInjY(AGkPu7Jg?;tUfY3lJ6Nv3Byl@rqiDf9VCJOVGO*j3Z5E&iql zI_^9aH`sHGru&LoRBjtxz2`3AeS_`2lavCyssKKKoA#t&rdk;=KaeEr%A=vHy_Q&k z(2%8&qskoj$Goc5LtFyOdN`EQ!Me0eNeiO=j z#&?2qu}DRDzp`d8Yfu}XS^-0Y!V#9Y0AnK2{|Inq2bNUeKJLoR5EGHAa@?m$<`th+ z#CvZR363AWyldZI@?Llb-zm(SHV~i&U?!1OW;A}jj$@mD^;e6@8HB^$M43!Hjsx<& zJaziOH?e+k#8@M@=+g&bhf&zR!z5N10J9bVb z2$;Q2iJj)G0;~c{t=WZ8w((&m*eG7N9oI7=4)dY)NH&ij;*NQr`wy19y-$s>wmE`A zsbu;F28nO+^;UfyY58Jx@}^Szj=gC_Mpv+b7&J$AqQpS)AgG)K@{KXb!6lWbbGSGa zJzwRtBIi(#y9CR}V<+=EXW_N;rXJn{w(COAqL*GMVd`v{ow9xzt6Qj_Ep;+u$c)-J zqHpy6Ve;|e^Pdt%dnadmhq$HU7Eh&U_Xhqdi)51qHqoyyl$z0kA3);qR7TOO9`(P_ zJfy!H$kTrEiW}$DYGTBNsvBiLUi~|%)#F;8Iuk?ZV4<3BwCW$e)fk?swt`-H6!_4O zPg!Ra4VOS=L!*6LV{=MFg`Q{ols*)so)CBLp8J~rd}?!|;(2yuEq~SU=*I<`%2>1I zy)<7<&78+_ohEUs0+~$QMzG~uVIypWz8;&$b|9O(g8{&;@IzNN0R+kx<|H&gb8pH| zP*3zTSnEELll6*{>@qntD%X9bTIoZE^3NarR~{7bJE{Qj79A8&0RQ zDA#L-oBsR{On=9DJ6xYVq9ZH>?8B*UTr7^c%@;AN*e$_-SgdDBMbNI+PcSc@B=seJ z*b2gO)6P5Aq;CB!&U$(O!`P{>cTP_JdQ?z&=S;wtgFg=w%@1T+kJk}?UEtAZPexA~ zKH#rAYq($5)b{vcDzCfCJZ?QK=&&clzwTegNqFE!QhTNWk)MHxB3<%-x~KqnQD8J# zvRQJo)D651zi*yI^aj=V^JmQ~lNZ)*B+TzIHZealePkrFII+{cdyU=b*XtDTQw==7 z2Ky-m+R>o7&356FDu|(n=|?LPe@uP><5?>`g33f^3kp()V~?V!AX9Z*s$1 zx%Ih%u45Oyu}?9c%DRWMl@Mxqcy)LQ)%gu2;5t|Vr8*c$kb-k%vqLZ^`L4N{!~CnA z&ztY<9Fa;Ylf3OM&L16YpoSR@$v@VtavM1R@EL69DH74_#qfIyCFnZlgT!41m`R6NI+FyI6j(KlRaGa zE5xica&QyR_pU<0Kh(lqW#z}8a<%CZs1GgqW?k8SV9E zLNEQS@R>!cQ6f}$e{l93ZHHVL71S2DDAV)>*;$}08EV}T&IMxCx4OO4AXIq%N+ z(%CJS5|KygV+i=eRL;#yd6&Xs_gQzXujujlV}%4YFTBnRE{pm8uDSGP^`#PM|F8wQ znv&sOHA8FltPoRnd<8&1F`Xs9K;nbNy;iIgSvleGEp!0v;(<^OqHH{<)vn&{5jyc{ z+;-5@yUe=jWA=TxjSL;9T)K0`V;7Z=0MMd5ILgz*%vl2V7+QN$!|EhwM%hn}!*b70 zSW%&~WR>RknF~<_F;?V4iKBdQRw@~{5*Q=i)fk7`y<9Yp%`x~z)BH2Ng!Qz64h?z^;P8@I8m_IFecU?Dw+o}8?I!9pU* z^rg?oCQLPP^K$ERi6be!6mz1%_b%+@2~>rU@Md3QB@1{HvoRg52=M*?)fENfDgelQmH1U&7y`h 
znJbXy*!%PNq^bn4OK+(NfaqW!0#ZV3P56<)u{NB%8$(wFKA*)nVFImExPjuX)jlu^ zB@gnZ(a28px2Yy#n^1d~97`H+-4{|1O#2Kq-bo$b9DG81b&-Pm4nL zy6ECDu*db}i7Jqs8BGvQ34^|ns7saI za+u?^Q=f8hE@Z8zHAkCun71VxulrpeXLb@~e|NR_igFP-(h7zt4%O;TgQhw~+^Yrh zic8*=dkYX;HjY>NhTM}jT#}7#_bR%!loqLA#FplAsmDey;9?2Kj)Lx&pw_x{nGR>$ z(!V?jFk1Lw2^ODRAE z6@CVJw+tLB_Ji##HC~f>`^!)JzKFghf2}^8M5Wvr$KJDnl=|Y3KE4QmiqV1PHV_Oo z+SfOrcDr2qYw(7sbkVWp$v1)6e#<8nZ=-2Zr+>T&HenZjWD6e^uqw>+K5MjT{#_fP zTA7Cge7$wnAcq$uIquK|NJN5DcO_!uZ^Sr8Y^+&v$#UV2+{Xz=k93M_XpU}Z=CWzm zvmu^NMFDLX2!;}5w>_x3AdFUu^&|fdF zyOx@@dLn_!K|eA_(AC9Go3*#{ppKv~kuZ(;Ly1Xo3|ttpry4gljjvtFqlC_GXt<`G z8k;vC<}=_(o<6PiXOFzsyLk362LREzVpFjk25gv-ML81u$$l*G=($r+xSdOPtVhT10@h4PZrW%}#_02hl!8`m$y+0AphlVG zoR$imLi~$U-PA1k5B651RT8|>1~mV};_Ki-1FcFb>8OmF)T*~%Z_2F8B>EqNnmVV< zqj2_L0MD`D&W4Una zA||PiX{X-%-nboP{`=ZDUyi=Jg@c@WllDuf>o<$IIp|z-WI!hdhh!@wTX{fm1|+MX z7@)a!iqke0{*)$Ix~}=4aKqtiGRt;12lEH}RkfA)J?+4HpR5AzUHXv@uy$3{fvz1f z-?Q^Xny2`>>FVSw1wpC28)=G4%^BNAp3TKEG`}AgQ=@$!+K%Pq!L+e6@lC0ZgsAo? z-?wS#fuViD_RRhXr2A{2ABrOh4&d(+4wv;d{F4%Y8m|+c6)xLB$?u;6QJszsjNUjJ z?B~o>D2DW^vy}lVCANt`YnN(8&=P{AD<$Q#rf>hsc<8S*b1`2!eiWWHyz1dS7stE{ zk`G$t+=r0lu$g;o8hhZxQjO3Bvd$A)PyD1{cmWEXeqe8_ z&_s=uETFNEt*jj=YRVZh9~+`dTi2~OnfzH%XbDJHHALj%NQS6XETS1K=gcXjomf%^ zUR~mBW~~xxOXAcd+U>@>b)HMVtpk-l1%C-jP7yvYvt8nwwAu>dQPC>c4tW(UJ|`cp z=T{!u)t!~63{{X(4sNKPu;lc!Gg5_@esuXaI=(c%Hm|GWIDd;;AXj+`*cEqZMwMuZ z!+3T7m^>ZGmmJcLQTq|ZWqp|Xq#0i3Jsb3$cX+kjEaEgH*K~%2>$ygSX?s@R{rZnq zFx}n8%9kKvTM=ll%KxpsFaKGQio{_^X}J5<7x=Uw!7Os$h8cZ{Je5E{3HeYjj@|`H zb6mzQCg~1LkkHq1Z2 zZ}pw-p6;}eKF}ZmfL0WxE`#C(=FA5~5>aFoeEs%`g3!WFJKU`M9LS}5JGJY#WAD28 z^haUn>3IRwp@O(sV^*Qwqq}V&`qu|Iko|$B7hF|S4N@7imrB~UMsMfiizl4W%!{8&PLd@L0{h)Zr62LYH146iN@sR&uR%26@ z50Nf^aCG)u@->?N|5hA6DCYktjy_PF{C|qG(g+l1#EN2`>o@i=aqp{L7*YCWq(K09 zj851U?mSK+-NU7io;84a`wh2=%E%`frp~I_9fGL@P~=Ws*rzx)q!*4m^p)|%0Y|F5 z0WTCsQG8$W;%iE}>r4XzUCLFn5^=_N8f3{~XG=hd>~<^|YAD&gq7(g}AmKL6>WHlVCPvTul7$?#mVVe@gB zzv=ApE2^QPHi}RW>lfucSIy%0rpi!km?j$B2%ewDl7V?L%H5XgYmV3sJobs1YxW 
z(%8td1lcDv%fgN0A_mz@a1QrP>)))5Pbt}TPe&*9M@xoJFh|ZYWu9#AqaVqnUf7h- z0!kr{)+UE^!4pwj&7P#G&a88%_T3c#NMPpe(5$pRpY%DmVcs73AoICz^U}ip&tsJg zC3yZtQBITAXaaBw0%XDjlyF4ocK^lpA(+<}6@ECRQaF zux+TWCEaNg1xM2PkhFgS)>8#iM0M;Yp;&9S-ROd}OQ6b&T8a26at*H3ni0WTDBj+h zF3o(Zn&))fd06cC!UrEZd?|+V+5j`+f2`WRR7hLX>K+18Ox==L6p6psNM?kdrsN(k zQGYfw5O7#n^2b*%t+2_|RWzaem|A3*)0P2i7B2EbXZx<|~f6vn(A=^PT zn4<@Pneh=$gb;M8h`8DIxF9#vnPpea^aYb%-bqs@)I#g>?6O!SyifxtfEmq3kesnI z!!}UWJCKsE$E2J_@P3#N8P_U)nZCDc62EJ*xnRP_cH!4|AE~+cXK^0S^51S54hZ}* z>w!ZGZSfD`%nO@gTJ5{DsGhtNV->~O|GX_-0>`6wuitP8<%MgcNcOQ0H{kiY9%{DX z6X}v*2TGk*GbgL`zJl|6BY4n`mA+xEh3_^g^~bZre7=>=3te{&e+b@2CjAg&yN{ETq>6hI!Jem@h62EEWHy={OoRI-+(`4|n=RPml=A**b z=n3lie;tdbHs6`+`;z17+&tvAM4+t5StKPK!EJC9&b}PmjHrzN@e$Ds@nfH|ssrr96hP@Fz73e7zWnI13lnDu4{7qAq@3 z(Dvwp-y;zn)QcPA{CCAvJKEXq9KGqK6e(})kreb1MHK8wL`1v8TF4T`C87CFwV&m< zO5$ro2mNhdRI@PZT51=){39i;%zvv2#%Bl|Pty5ZW1>mwwhh%E+xcb`Be?M(|B38& zNzdQfgC{9Abv~_5&y|$kEtdR#RWHyhC_K;y_=`yATfzxiPXQ*aO9NDwW<(aGAICkB zn#4yXy1ERtd9o_r&7j5#@%(!5%G&w?2E_fMMbPeK|1E#2IcBVBxEn-_YD(hwZ4z5$ zQc)ux3nAwU_nR7xZGmFzEY)WX<{He~rQ(^BI1hQK!E_vo$3QLg%aUi8e-x7SKbTjt z$Z!wtn7VVbH-VZO?=l>}y}zKUQZak~PR^nh>WtjpZNK*?ECil%2S;L_|6i6~r3OfE z^nQZ9je1;fAd6+C8IiX~0$K7B@i=Zb->}%_Mc1a%lm*cN<4-Z_#UA#TkAIVsL`Y&r z5p}~IjY#|l-tywMMyM=&G z5+!8Gtd&F?L9ij(;RnskKQ}csxA;X*?hpjx98;#9JFDUs zc@a&FuhDP*E#w)!iy*~1x*E*^Es<>S0Eu;1G_-B$Ve&huW8WX<^Vxeg>$R}+jpzeE zTImeFx$SZD?eTlIt}8t-%r};SK?cK{VXF6%&v0>U)?|a& zptnB3A4)hs&(6PX!pFqH%DEk19P|D7Nbgg3^nG2<{I8Kn;kLWLfD3)qfh7guy12|b z7Wr&MHc4pR0RO_)a)}qp#?BjMYujhb75wZ6`*r404xE-&4%=vAz;Zy4jjN!a0yP@zwDx?JR$%*~V5x`yh z0l<;10_duts@twtf85=F6@?}Eoin*?bFS-p?#c!1$*a!Hae|WDXBZ3-Wp_j5n3qSz z%to7aDME+UD~F#kVkYl5aGZ2`ngUrPvqq)-XR9iymXa>{vWu~g5|Oj9aTX1F)|(HC-# zdlk1+ycuI}{V~Pb1B0Vzk$oH^jqT8F)$>*ZEImkg#{3;ae!QNHUc94;TK<~(jI)SjoVnW zg~$bFH8X=(EbW$A$xef@lusGj)Va%yHjos8CV=a}ksSObLVL%qK|%hcIZ7m{?iE-5 zlWiBVLF^6Yox*P~f84noM<5tZ{z~eeV&bGur;eE1Gw;DX0^(vlk^_6dd*tY!l3zPi 
zvZUm(^gE7*Ep~+hJ7<-veeNFknF^$Q+dh{$@kV!BZ{RfFE&H;5cHnS7o*!h$NX6X7SN}EuqU4WGl6c9KACUHvlqbty<6)MY&t9+YQZO4 zcU^3(3wOQ1wEemLWD1xBuJp56e}Z?UBN+%-(t{Ro_T$?Ym(LjAldQ6&5tpSGAzbBu zbR#Twq04GT?CF(_*;7aB^-5*6i2~FdZo|sHd`O3Y5~Ba0;5cY-(%KTDw^G7q5k|f~ zr4e2s*zQs>^djwuppS8w675x-zuJdO8^z97d`?0;{>XlSP4esz%Yvp(| zI<8F)i(lu!@ne2qKj-C=!k5-MaY-0m^6kom(?R8>`znk+*-dUOpIzo-5`S)gKile= zbT|2R>Aa}7y&NNMyNd{)ZL@~pRDI)euQv95Tx>l?k95gEvY9kgg$*e`4e+wDcbu~b zi~Q#9*y{LcxB9!NrmOiA8sfuY00~Tx#^2RtL1OEqv`Rv%)QfXIS?8?M7uNY`!eo}N zt^iK2;CNTfrwRR$^Ic}q7s~hA;I|Z=S?P z8?R(9VjSu@xnSrh0!olRK?hD~jGwv_$vu%&hGbOf@=B-E$DG=*yvA=|R0Sv)POdYW zpV-q%Q}naydC=}b328r@O_E7`oqqSabOvB|n`}d3@!9!x3nd*$NY70W%xdNPXc7;aYPG5w7bpxn9qy>YTAFpp6 z>=$HAjaH*Q3FTNu$kKlDBYu1bAVE7TuO?cLt&Jl>hol}oejT7Um=F-9g7K|~~~!y)Z2AR$?xuU%!N~iZI zKdj8Wf7@IvN#vgtjpAMmmdk;YFRsQN6hepIe|G0b7|#rG$?N?eA!gKJT1LYsBehi2 z%V0V}gy&j5FyEMza}xV6WTR(F)Q)MdTjX|WuNyMa8inwVEox4F%GZgj?u0VGh2b0K zC24766)?ZW^{S1@cZ9jp<@aj>Dfn)4|7KPNUoOX*nGRFk@R)v$s&EhZlM3c%+s^UQ zo|Slm8iq)n<^5t|DQWv*&a97iDsbc7KXl5etx>aN#7bv zKHEEEhr?j&vXR$%YU7OWo7~*M9H0L{_DrHR3{)h#Htc%fL`#L%^L(uO{O^viaVdT2 zkeOl$*pPqBySP|8=kEOjWl}qSukYH;#s!BihuIyk-et-_0cT62_I@!Fe*$~PGvlz3 zV(mGgdWbx1ZDj&Ff*eTakV15dBZ;`gR!->TFn*yek^DAS-(c!}t`A{k$G2Y^p$@V? z|ASrD-wOwP*qdbd5|+jv-h*U0P)RnU#6pj5fKygF?_9DE;cJHmz|m0p8Flxg@@Yqp z?4iW(&!j3}XfiUsKh=z+v+EK7r&%YK=zgi16Dukj(wi!71%yAC&tAU$)- zluJl4{&SMJ+Go|JH-^1qL@ES%>h^6MKDesj%!qfi6N#NJyELV5EfQZTU7bxawN)aQ z0($P3ax9+er7ZBSeUtWNXopc9{s;213C(}_Ve3H2xa04~FIw>r5nSI05gy%IO?s3Z zU%+cvft~B3OhF*d#3FQ`)ttx`}T zF_kCOpNj1I>oWij_M!`_zeE#OS1J09HI`TLp$o+vzX4Vtyt-8v+hK(fr*T2`<&f~h z{3mp9G92f(y{l(@8$Wym<%=Vw7=e1N^gr7~`Q!Wcgfo|7F@7&3Co1{Cf^6N&0eMg^ zk=ANJWsV+&7j^d>M0a5FQUvVNwz_L! 
z`P*IOwa&$J5D~dGlqWX1efQxoSEzAkomo#lkKbU?O%C#-K;MA>mEorA_4|J^+|auK z6`TGIkK1}hJ%%X?vj!GQoJuQT1Fb?M= zIFw)!Q=~pR`z5S53R3vpg=2zlZuUut{E#9Yug4NlY*2pJZxNX{exVuDubfNjnv!pI zq&Nqk<4w*IK4q;RPiLo+GrIo=kcZEV$d;@pT0ZY(AV;D+o&qo=jO7--Dz8L#Z1JVq zH+9%5&};a%*~E54EsiEwzLZx%8lunjgqdcND&5CQ$eQ`07A2@5mD9_bpWHm|uCio0 zQ;uKOGUWSmUjhU`SqP-@EMRe(siana>#*Uh>O<}=D!@FS04w@E7LN4nApP!G)ciuW zE+O;v-Tr%{}V!Zf?`53&ml>^&x)j%9ji^opI{^N)h zq6rYH-nc=D79-7$`0#BNTe5y;S=`H-hFqK3BHJJI6`NPooZS^1HLj}D880L>G-=jH=oq~HnRT7Idi=z>S|ln z?`@k>Z!pbvso>ZSjuHE3i)<$X?&hkAwQ-s8eM`mRV>GpzgyDxy_M2H=jas~qg155S zKhE?Xy;0wRHYUSOkR&tz6fE~E+XN)Q=Ak6UxnWJ7opl3QZqSfc-J@eszrI#AUJ+*V z?jS@OP@sPqlvgJXKm`PVCyx4C5{veSN7EFS%EP}XcHuYsCueumf`4l{2|m8-DsR7V zWzpktCjIhFYsEQ`&=>uTrjkfuM%Gj{gUo`ZE4k9XSZ^xRVf9uoN)hQvovY8w+%s8A ztjT8q4mY{8i`~HAGy^Ncf z06RPZLNf0+l|TcBfTEt@g?|z*zJ_8PPXe5=T2#T9-^fh^)IG8&_$TEpRU{(-U+lvJ-kSSRgcJXtM-d!feLT+Jm$!KlKiK7Qc z?+qv)th80dcTylF5G6fBdg}H-1u~^6y<2~)bekAqI&b19a^&MB=cKQZ%zlqQUdE>- z?EmNln4;$kk;ys@h**WFnzhCvreHIGnR|pyFUtHiOsCMhV>P+GbkmS+aVWqvpid@S zH@o&ES8H4(%nUsVz{TSKg2gg^b}hDc(0(+`C5GXy@(=&Pru$0&gZ;8%q94(qDk1)q zIH4fB4J<7Kb>s1H3rkF}u)sRt;#)=R^C4;MsMSaL(=S5~nT-E~F{J)MNaRD3eOvW; z24dh;ZprL_T4MX3mMBm_5U9+N059o)2IZ)JpO2%iX?PB@ZV&$VXg-}qWe>h}=?gvT z4IpUlqot;@(T;giT#0n0i7DiP>OTR@XxiWtuK!@t8*2#4|9phHQ8jG?8xJsqM+VHKFi7>eDyc`fBGdR;%C%Eli#6LZ3;p zvFd|-mhoG&cT&9tl8ba_Xzy#jmnx`p=gWX3y@XL&D+^-NrzU0#tP)H(eK zJJAKd_=}h9Y*Az_+w5vD88-RFJ9`j z$`lF6L_KxZA$z}RQ?R2c(Sbg1do;t=RZOa%LFwB?% z(2MiNi;nAu7BhP;Mv5@3{T)%Ae|_k^_Gj$AU`QBDbXEL}SwRtd`(9E`06Hq=T~3H+ z#`p+hr9&k5xzV(Y*9kTR820Z^r2G&I?1Wu@@9TMZXQtt7;QT1V7b$>%%*>Q$SY!tih^beONxFZ0a@7dzB=HWsiZ;o&Z@Sz?P$N@pnRvo&FT6PV}&_(j9}s*{<| z8<-#NT71(IZyI4LlssrJ(o2BrUBC6CX1fQ*j3*}93o7oHoV!tabUg?5|9%><|Gzv8 zd9ffwu8f7)Yg=mq*NM9tFma*POmpcxZ`XSaVyig@zxiSG^2M}QALWJeOZXG_{zbhx zl2WwaJdfHn-MSu9e6MI3pgA+)9}dEPodr(mMU!QW`iBUM0w%qKO{04O>VEgYvZ9=q~A;Z`G z0#(^Gqt(m@jcYmw1q>-RcWupCqXzUI7TuB_0YKcFrsp0O8HSHlrFb!XFxnl21q=|f z8B$DG8hY9IVGIo(e@0kw7q9Ku7+dMR286i>I{Gd0rb53F?)%*O@~ln(-SCL^Kp(#c 
z$LMqPw0cP-z=oCymDm>p^sj?#IWQ*PNBkcigx`$)y7qc)@?XaVuxDH+#BC}#ALm?P zYk?_){%^uD-qU^RFE}5_F~Eo!J#wOP$lU*tpm9DgF7g;x>+_(0{laA>o-cvILnhAf z)XFd~qO6#G(nlNCsJ-hWFl})zCgI@Lv}MUjQX1p6@E{}sCj#{s5~GS z_dL@e(`ui!7lzyH;t}QmLk^iyR@hL4LWHhugy!}B?i8ZlWf*ONRsI8qs#K)D98*b_ zE|zDO_1fw7TSUf2>b8NELBz+3s1ag?O#UO8{4<#re?2Q628Y}a&6?vZ`wX|EN60a~ zc-XI=)L=Kf;-nFA(n^`QI$(oxDP#SSF&Zf~%ky-ZY$F>rs7?wT7pX2@f8bNkV0PqO z4lHGHX7@ISknH%B!&@(&^#V1n-Q6LD2&84aWulj(^0_L3+zKf7`uF>uA-|h!S>sA7 zOiDVt&!|j$o@|V;an7(qsvUzHayd0*u4M$i&KOvI(T;fzfL{#Yq?kyHmGi^^wBTl0v1lR8 z!|P|SFGbOObAHflCkH>@{k%UQo%7RAIsMm}e#`;Pp@phwpJ>nN`W)77^v zRo)AK?7{pzSqS0}%x5@tV+_SkAPsbc4vKHlHs6gmpU(DiIV~?saZI+VuVv? zc&4uFTH&tPTNCa2)3%R)^^`Y&8#E?@lA$ZTukcrQx;wp2@VN2bv5Mil1q+EbxW}rq zrC2cbQ10$CuCgKIlN6tg=ea1{Ung*|+~>n~Mf>-I!qBK3V}b$}u}rS}m(gNG?;F%kbp zXRKyh%~H^6)dnP(;rCwUzobMp94wD(waV1j<(NkCea~Qct)i0hQtG23`wrc;(ICWQ z%j$y1LjR`WaSf#_R;IT-*@@jlx2M5xW(h^7LwvjTJ~%{ zFHBuqbeU74Bc-wyWaQj*X2Rp zyf@d9+d)lL1A=Q%ekjoJ=of!PEL#$TMT2LaJoQ2(J_mL~hqgX5%PsddQUuOC`ZhW) zL5^YikDQdyUpo_+M);xEi#-#57!UQ$#lmHNK!7auJ7zI61pzMEyB-a^1$V1v8H}^) zTJhU586!3?mIMca`^)Z|Z(FAsqKf%#8KroW_luZdB60;ReL6)RQQ+%UogR(GzDe-o zZV$x8ODoCXwzVXO;OM}`1UVQoIAs&u)UcXnZNhI0>&oxVY$ZvI+ZnP*B2?c3)d%8= zRq)#~yuSXsSV{)8Qna>L;_0OgzVp)KyO_WZ`eO4~E=clN2klJk9=!D!1Pz((#44md z1H)2JU4(~S(ME$yMXoi_Kml{_RjRGfwmKLfC$>T%cLvEMT-R*7zsa5H6rAo}>tQwJh*sjyzSX*`vc@as{x%SzRC^BQeKB zhu_YSi5n|&h1wntdgPBp_kux6d1)6ssIfC-4TxEMo|FSbxc9OsSQ=cgv7T-o^NUe}x1uemr6ExXDAM9E@#$1J9z$~&QEh=`YE^-A7eFNr&jNqOQBZo`= z&##AoS%h^zT%v#YC+xo#z%cT>K?`yPOxKG*3!vdMv)aX=1@O&*(7~I&0q{}68F|nG zxaj?3$`%G%0J6frwk7L~$o2nwoTF!%LIxT7*}w={j51|gU?6*)feojwwFf#h)xNHK z`Pui$Dakh-U})kIrSy?%#_M7{*Xke^&S!h;z1AAa`6dAk8iA6{CwwWtQg3 zqu=1y{bZR!xv#99VK*BOgfmmaz#I}4t=u1n|1g0m>-^Kv=|5_A@iOi$@83p>=Xncx z$^y}u)QrU!zyp;Ih^5IF?i^YHjcQmxG7d;8Jo#6@X@mJmA6rzu&jdBf5dTm8ODayPo(`M_606u+-S!Q8 zUYB+zxsDF*7h4$oHecuV*+;bpbk`h>dCrsuevouuZax^?u6#CflM&lQ Date: Mon, 20 May 2013 13:45:01 -0400 Subject: [PATCH 02/99] Active region boundary parameters need to be bigger when running 
in GGA mode. CGL performance is quite a bit better as a result. -- The troule stems from the fact that we may be trying to genotype indels even though it appears there are only SNPs in the reads. --- .../gatk/walkers/haplotypecaller/HaplotypeCaller.java | 11 ++++++++--- ...llerComplexAndSymbolicVariantsIntegrationTest.java | 4 ++-- .../HaplotypeCallerIntegrationTest.java | 2 +- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index f065a0d7d..fd8a1968b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -418,7 +418,8 @@ public class HaplotypeCaller extends ActiveRegionWalker, In private final static int PADDING_AROUND_OTHERS_FOR_CALLING = 150; // the maximum extent into the full active region extension that we're willing to go in genotyping our events - private final static int MAX_GENOTYPING_ACTIVE_REGION_EXTENSION = 25; + private final static int MAX_DISCOVERY_ACTIVE_REGION_EXTENSION = 25; + private final static int MAX_GGA_ACTIVE_REGION_EXTENSION = 100; private ActiveRegionTrimmer trimmer = null; @@ -549,7 +550,8 @@ public class HaplotypeCaller extends ActiveRegionWalker, In haplotypeBAMWriter = HaplotypeBAMWriter.create(bamWriterType, bamWriter, getToolkit().getSAMFileHeader()); trimmer = new ActiveRegionTrimmer(DEBUG, PADDING_AROUND_SNPS_FOR_CALLING, PADDING_AROUND_OTHERS_FOR_CALLING, - MAX_GENOTYPING_ACTIVE_REGION_EXTENSION, getToolkit().getGenomeLocParser()); + UAC.GenotypingMode.equals(GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES) ? 
MAX_GGA_ACTIVE_REGION_EXTENSION : MAX_DISCOVERY_ACTIVE_REGION_EXTENSION, + getToolkit().getGenomeLocParser()); } //--------------------------------------------------------------------------------------------------------------- @@ -751,7 +753,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); if ( ! dontTrimActiveRegions ) { - return trimActiveRegion(activeRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); + return trimActiveRegion(activeRegion, haplotypes, activeAllelesToGenotype, fullReferenceWithPadding, paddedReferenceLoc); } else { // we don't want to trim active regions, so go ahead and use the old one return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc, true); @@ -763,6 +765,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In * * @param originalActiveRegion our full active region * @param haplotypes the list of haplotypes we've created from assembly + * @param activeAllelesToGenotype additional alleles we might need to genotype (can be empty) * @param fullReferenceWithPadding the reference bases over the full padded location * @param paddedReferenceLoc the span of the reference bases * @return an AssemblyResult containing the trimmed active region with all of the reads we should use @@ -771,12 +774,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In */ private AssemblyResult trimActiveRegion(final ActiveRegion originalActiveRegion, final List haplotypes, + final List activeAllelesToGenotype, final byte[] fullReferenceWithPadding, final GenomeLoc paddedReferenceLoc) { if ( DEBUG ) logger.info("Trimming active region " + originalActiveRegion + " with " + haplotypes.size() + " haplotypes"); EventMap.buildEventMapsForHaplotypes(haplotypes, fullReferenceWithPadding, paddedReferenceLoc, DEBUG); final 
TreeSet allVariantsWithinFullActiveRegion = EventMap.getAllVariantContexts(haplotypes); + allVariantsWithinFullActiveRegion.addAll(activeAllelesToGenotype); final ActiveRegion trimmedActiveRegion = trimmer.trimRegion(originalActiveRegion, allVariantsWithinFullActiveRegion); if ( trimmedActiveRegion == null ) { diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index d6c6a4f33..9ef9fea77 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "90cbcc7e959eb591fb7c5e12d65e0e40"); + "008029ee34e1becd8312e3c4d608033c"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "50894abb9d156bf480881cb5cb2a8a7d"); + "ae8d95ffe77515cc74a55c2afd142826"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 15516d090..2d4223e5c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -96,7 +96,7 @@ public 
class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "dbbc884a975587d8e7255ce47b58f438"); + "bb30d0761dc9e2dfd57bfe07b72d06d8"); } @Test From 62fc88f92e239edd44ec05ac75e0f99fdc9e62e7 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 20 May 2013 13:36:30 -0400 Subject: [PATCH 03/99] CombineVariants no longer adds PASS to unfiltered records -- [Delivers #49876703] -- Add integration test and test file -- Update SymbolicAlleles combine variant tests, which was turning unfiltered records into PASS! --- .../variantutils/CombineVariantsIntegrationTest.java | 11 +++++++++++ .../sting/utils/variant/GATKVariantContextUtils.java | 6 +++++- .../walkers/CNV/SymbolicAllelesIntegrationTest.java | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java index 6c4072962..917cbd542 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java @@ -189,4 +189,15 @@ public class CombineVariantsIntegrationTest extends WalkerTest { Arrays.asList("aa926eae333208dc1f41fe69dc95d7a6")); cvExecuteTest("combineDBSNPDuplicateSites:", spec, true); } + + @Test + public void combineLeavesUnfilteredRecordsUnfiltered() { + WalkerTestSpec spec = new WalkerTestSpec( + "-T CombineVariants --no_cmdline_in_header -o %s " + + " -R " + b37KGReference + + " -V " + privateTestDir + "combineVariantsLeavesRecordsUnfiltered.vcf", + 1, + 
Arrays.asList("f8c014d0af7e014475a2a448dc1f9cef")); + cvExecuteTest("combineLeavesUnfilteredRecordsUnfiltered: ", spec, false); + } } \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java index 4565402b9..b5a6e82a0 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java @@ -697,6 +697,7 @@ public class GATKVariantContextUtils { int maxAC = -1; final Map attributesWithMaxAC = new LinkedHashMap(); double log10PError = CommonInfo.NO_LOG10_PERROR; + boolean anyVCHadFiltersApplied = false; VariantContext vcWithMaxAC = null; GenotypesContext genotypes = GenotypesContext.create(); @@ -729,6 +730,7 @@ public class GATKVariantContextUtils { log10PError = vc.getLog10PError(); filters.addAll(vc.getFilters()); + anyVCHadFiltersApplied |= vc.filtersWereApplied(); // // add attributes @@ -841,7 +843,9 @@ public class GATKVariantContextUtils { builder.alleles(alleles); builder.genotypes(genotypes); builder.log10PError(log10PError); - builder.filters(filters.isEmpty() ? filters : new TreeSet(filters)); + if ( anyVCHadFiltersApplied ) { + builder.filters(filters.isEmpty() ? filters : new TreeSet<>(filters)); + } builder.attributes(new TreeMap(mergeInfoWithMaxAC ? 
attributesWithMaxAC : attributes)); // Trim the padded bases of all alleles if necessary diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java index 4aaba0d70..bfabe2bc1 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java @@ -57,7 +57,7 @@ public class SymbolicAllelesIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString(b36KGReference, "symbolic_alleles_2.vcf"), 1, - Arrays.asList("bf5a09f783ab1fa44774c81f91d10921")); + Arrays.asList("30f66a097987330d42e87da8bcd6be21")); executeTest("Test symbolic alleles mixed in with non-symbolic alleles", spec); } } From 1f3624d2046738a0a8f827489e49b6a8282c7477 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Tue, 7 May 2013 11:59:18 -0400 Subject: [PATCH 05/99] Base Recalibrator doesn't recalibrate all reads, so the final output line was confusing --- .../sting/gatk/walkers/bqsr/BaseRecalibrator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java index dde49b7db..278317da3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java @@ -519,7 +519,7 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche generatePlots(); } - logger.info("Processed: " + result + " reads"); + logger.info("BaseRecalibrator was able to recalibrate " + result + " reads"); } private RecalibrationTables getRecalibrationTable() { From 58f4b8122221e052143e4d0e4771bd0a52995c17 Mon Sep 17 
00:00:00 2001 From: Eric Banks Date: Tue, 7 May 2013 12:23:24 -0400 Subject: [PATCH 06/99] Count Reads should use a Long instead of an Integer for counts to prevent overflows. Added unit test. --- .../sting/gatk/walkers/qc/CountReads.java | 11 +++-- .../traversals/TraverseReadsUnitTest.java | 8 +-- .../gatk/walkers/qc/CountReadsUnitTest.java | 49 +++++++++++++++++++ 3 files changed, 61 insertions(+), 7 deletions(-) create mode 100644 public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java index 825fcac90..45beea28f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java @@ -66,11 +66,16 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord; */ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) @Requires({DataSource.READS, DataSource.REFERENCE}) -public class CountReads extends ReadWalker implements NanoSchedulable { +public class CountReads extends ReadWalker implements NanoSchedulable { public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker tracker) { return 1; } - @Override public Integer reduceInit() { return 0; } - @Override public Integer reduce(Integer value, Integer sum) { return value + sum; } + @Override public Long reduceInit() { return 0L; } + + public Long reduce(Integer value, Long sum) { return (long) value + sum; } + + public void onTraversalDone(Long result) { + logger.info("CountReads counted " + result + " reads in the traversal"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java index 8bc373fe8..e8840c39f 100644 --- 
a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java @@ -153,11 +153,11 @@ public class TraverseReadsUnitTest extends BaseTest { countReadWalker.onTraversalDone(accumulator); - if (!(accumulator instanceof Integer)) { - fail("Count read walker should return an interger."); + if (!(accumulator instanceof Long)) { + fail("Count read walker should return a Long."); } - if (((Integer) accumulator) != 10000) { - fail("there should be 10000 mapped reads in the index file, there was " + ((Integer) accumulator)); + if (!accumulator.equals(new Long(10000))) { + fail("there should be 10000 mapped reads in the index file, there was " + (accumulator)); } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java new file mode 100644 index 000000000..cf115cc76 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java @@ -0,0 +1,49 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.walkers.qc; + +import org.testng.Assert; +import org.testng.annotations.Test; + +public class CountReadsUnitTest { + + @Test + public void testReadsDoNotOverflowInt() { + + final CountReads walker = new CountReads(); + + final long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L; + + Long sum = walker.reduceInit(); + + for ( long i = 0L; i < moreThanMaxInt; i++ ) { + final Integer x = walker.map(null, null, null); + sum = walker.reduce(x, sum); + } + + Assert.assertEquals(sum.longValue(), moreThanMaxInt); + } +} From 20c7a8903020900b1dbc9af0c7e1877118bbe764 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Tue, 7 May 2013 13:53:43 -0400 Subject: [PATCH 07/99] Fixes to get accurate read counts for Read traversals 1. Don't clone the dataSource's metrics object (because then the engine won't continue to get updated counts) 2. Use the dataSource's metrics object in the CountingFilteringIterator and not the first shard's object! 3. Synchronize ReadMetrics.incrementMetrics to prevent race conditions. Also: * Make sure users realize that the read counts are approximate in the print outs. * Removed a lot of unused cruft from the metrics object while I was in there. * Added test to make sure that the ReadMetrics read count does not overflow ints. * Added unit tests for traversal metrics (reads, loci, and active region traversals); these test counts of reads and records. 
--- .../sting/gatk/ReadMetrics.java | 135 +------- .../providers/LocusReferenceView.java | 4 +- .../gatk/datasources/reads/SAMDataSource.java | 14 +- .../sting/gatk/executive/MicroScheduler.java | 3 +- .../filters/CountingFilteringIterator.java | 65 ++-- .../gatk/traversals/TraversalEngine.java | 9 - .../traversals/TraverseActiveRegions.java | 2 - .../gatk/traversals/TraverseDuplicates.java | 1 - .../gatk/traversals/TraverseLociNano.java | 1 - .../gatk/traversals/TraverseReadPairs.java | 1 - .../gatk/traversals/TraverseReadsNano.java | 2 - .../sting/gatk/ReadMetricsUnitTest.java | 321 ++++++++++++++++++ .../gatk/walkers/qc/CountReadsUnitTest.java | 4 +- 13 files changed, 384 insertions(+), 178 deletions(-) create mode 100644 public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java diff --git a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java index aadb57985..f73e7ccd5 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java +++ b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java @@ -40,37 +40,27 @@ public class ReadMetrics implements Cloneable { private long nRecords; // How many reads have we processed, along with those skipped for various reasons private long nReads; - private long nSkippedReads; - private long nUnmappedReads; - private long nNotPrimary; - private long nBadAlignments; - private long nSkippedIndels; - private long nDuplicates; - private Map counter = new HashMap(); + + // keep track of filtered records by filter type (class) + private Map filterCounter = new HashMap<>(); /** * Combines these metrics with a set of other metrics, storing the results in this class. * @param metrics The metrics to fold into this class. 
*/ - public void incrementMetrics(ReadMetrics metrics) { + public synchronized void incrementMetrics(ReadMetrics metrics) { nRecords += metrics.nRecords; nReads += metrics.nReads; - nSkippedReads += metrics.nSkippedReads; - nUnmappedReads += metrics.nUnmappedReads; - nNotPrimary += metrics.nNotPrimary; - nBadAlignments += metrics.nBadAlignments; - nSkippedIndels += metrics.nSkippedIndels; - nDuplicates += metrics.nDuplicates; - for(Map.Entry counterEntry: metrics.counter.entrySet()) { + for(Map.Entry counterEntry: metrics.filterCounter.entrySet()) { Class counterType = counterEntry.getKey(); - long newValue = (counter.containsKey(counterType) ? counter.get(counterType) : 0) + counterEntry.getValue(); - counter.put(counterType,newValue); + long newValue = (filterCounter.containsKey(counterType) ? filterCounter.get(counterType) : 0) + counterEntry.getValue(); + filterCounter.put(counterType, newValue); } } /** * Create a copy of the given read metrics. - * @return + * @return a non-null clone */ public ReadMetrics clone() { ReadMetrics newMetrics; @@ -82,13 +72,7 @@ public class ReadMetrics implements Cloneable { } newMetrics.nRecords = nRecords; newMetrics.nReads = nReads; - newMetrics.nSkippedReads = nSkippedReads; - newMetrics.nUnmappedReads = nUnmappedReads; - newMetrics.nNotPrimary = nNotPrimary; - newMetrics.nBadAlignments = nBadAlignments; - newMetrics.nSkippedIndels = nSkippedIndels; - newMetrics.nDuplicates = nDuplicates; - newMetrics.counter = new HashMap(counter); + newMetrics.filterCounter = new HashMap<>(filterCounter); return newMetrics; } @@ -96,16 +80,16 @@ public class ReadMetrics implements Cloneable { public void incrementFilter(SamRecordFilter filter) { long c = 0; - if ( counter.containsKey(filter.getClass()) ) { - c = counter.get(filter.getClass()); + if ( filterCounter.containsKey(filter.getClass()) ) { + c = filterCounter.get(filter.getClass()); } - counter.put(filter.getClass(), c + 1L); + filterCounter.put(filter.getClass(), c + 1L); } 
public Map getCountsByFilter() { - final TreeMap sortedCounts = new TreeMap(); - for(Map.Entry counterEntry: counter.entrySet()) { + final TreeMap sortedCounts = new TreeMap<>(); + for(Map.Entry counterEntry: filterCounter.entrySet()) { sortedCounts.put(counterEntry.getKey().getSimpleName(),counterEntry.getValue()); } return sortedCounts; @@ -143,95 +127,4 @@ public class ReadMetrics implements Cloneable { public void incrementNumReadsSeen() { nReads++; } - - /** - * Gets the cumulative number of reads skipped in the course of this run. - * @return Cumulative number of reads skipped in the course of this run. - */ - public long getNumSkippedReads() { - return nSkippedReads; - } - - /** - * Increments the cumulative number of reads skipped in the course of this run. - */ - public void incrementNumSkippedReads() { - nSkippedReads++; - } - - /** - * Gets the number of unmapped reads skipped in the course of this run. - * @return The number of unmapped reads skipped. - */ - public long getNumUnmappedReads() { - return nUnmappedReads; - } - - /** - * Increments the number of unmapped reads skipped in the course of this run. 
- */ - public void incrementNumUnmappedReads() { - nUnmappedReads++; - } - - /** - * - * @return - */ - public long getNumNonPrimaryReads() { - return nNotPrimary; - } - - /** - * - */ - public void incrementNumNonPrimaryReads() { - nNotPrimary++; - } - - /** - * - * @return - */ - public long getNumBadAlignments() { - return nBadAlignments; - } - - /** - * - */ - public void incrementNumBadAlignments() { - nBadAlignments++; - } - - /** - * - * @return - */ - public long getNumSkippedIndels() { - return nSkippedIndels; - } - - /** - * - */ - public void incrementNumSkippedIndels() { - nSkippedIndels++; - } - - /** - * - * @return - */ - public long getNumDuplicates() { - return nDuplicates; - } - - /** - * - */ - public void incrementNumDuplicates() { - nDuplicates++; - } - } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java index d5b7d0487..b5efbc693 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java @@ -176,13 +176,13 @@ public class LocusReferenceView extends ReferenceView { /** * Gets the reference context associated with this particular point or extended interval on the genome. - * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beoynd current bounds, it will be trimmed down. + * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beyond current bounds, it will be trimmed down. * @return The base at the position represented by this genomeLoc. 
*/ public ReferenceContext getReferenceContext( GenomeLoc genomeLoc ) { //validateLocation( genomeLoc ); - GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), bounds.getContigIndex(), + GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), genomeLoc.getContigIndex(), getWindowStart(genomeLoc), getWindowStop(genomeLoc) ); int refStart = -1; diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java index 1223dd2af..bf25582ab 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java @@ -440,9 +440,8 @@ public class SAMDataSource { * @return Cumulative read metrics. */ public ReadMetrics getCumulativeReadMetrics() { - synchronized(readMetrics) { - return readMetrics.clone(); - } + // don't return a clone here because the engine uses a pointer to this object + return readMetrics; } /** @@ -450,9 +449,7 @@ public class SAMDataSource { * @param readMetrics The 'incremental' read metrics, to be incorporated into the cumulative metrics. 
*/ public void incorporateReadMetrics(final ReadMetrics readMetrics) { - synchronized(this.readMetrics) { - this.readMetrics.incrementMetrics(readMetrics); - } + this.readMetrics.incrementMetrics(readMetrics); } public StingSAMIterator seek(Shard shard) { @@ -548,7 +545,10 @@ public class SAMDataSource { MergingSamRecordIterator mergingIterator = readers.createMergingIterator(iteratorMap); - return applyDecoratingIterators(shard.getReadMetrics(), + // The readMetrics object being passed in should be that of this dataSource and NOT the shard: the dataSource's + // metrics is intended to keep track of the reads seen (and hence passed to the CountingFilteringIterator when + // we apply the decorators), whereas the shard's metrics is used to keep track the "records" seen. + return applyDecoratingIterators(readMetrics, enableVerification, readProperties.useOriginalBaseQualities(), new ReleasingIterator(readers,StingSAMIteratorAdapter.adapt(mergingIterator)), diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java index 4ffdc88d8..7077db49c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java @@ -52,7 +52,6 @@ import javax.management.ObjectName; import java.io.File; import java.lang.management.ManagementFactory; import java.util.*; -import java.util.concurrent.TimeUnit; /** @@ -368,7 +367,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { for ( final long countsByFilter : cumulativeMetrics.getCountsByFilter().values()) nSkippedReads += countsByFilter; - logger.info(String.format("%d reads were filtered out during traversal out of %d total (%.2f%%)", + logger.info(String.format("%d reads were filtered out during the traversal out of approximately %d total reads (%.2f%%)", nSkippedReads, cumulativeMetrics.getNumReadsSeen(), 
100.0 * MathUtils.ratio(nSkippedReads, cumulativeMetrics.getNumReadsSeen()))); diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java index 3e50632d9..6c926e3cf 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java @@ -1,28 +1,28 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-*/ - +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + package org.broadinstitute.sting.gatk.filters; import net.sf.picard.filter.SamRecordFilter; @@ -41,7 +41,8 @@ import java.util.NoSuchElementException; * @author Mark DePristo */ public class CountingFilteringIterator implements CloseableIterator { - private final ReadMetrics runtimeMetrics; + private final ReadMetrics globalRuntimeMetrics; + private final ReadMetrics privateRuntimeMetrics; private final Iterator iterator; private final Collection filters; private SAMRecord next = null; @@ -54,7 +55,8 @@ public class CountingFilteringIterator implements CloseableIterator { * @param filters the filter (which may be a FilterAggregator) */ public CountingFilteringIterator(ReadMetrics metrics, Iterator iterator, Collection filters) { - this.runtimeMetrics = metrics; + this.globalRuntimeMetrics = metrics; + privateRuntimeMetrics = new ReadMetrics(); this.iterator = iterator; this.filters = filters; next = getNextRecord(); @@ -95,6 +97,8 @@ public class CountingFilteringIterator implements CloseableIterator { public void close() { CloserUtil.close(iterator); + // update the global metrics with all the data we collected here + globalRuntimeMetrics.incrementMetrics(privateRuntimeMetrics); } /** @@ -105,12 +109,15 @@ public class CountingFilteringIterator implements CloseableIterator { private SAMRecord getNextRecord() { while (iterator.hasNext()) { SAMRecord record = iterator.next(); - runtimeMetrics.incrementNumReadsSeen(); + + // update only the private copy of the metrics so that we don't need to worry about race conditions + // that can arise when trying to update the global copy; it was agreed that this is the cleanest solution. 
+ privateRuntimeMetrics.incrementNumReadsSeen(); boolean filtered = false; for(SamRecordFilter filter: filters) { if(filter.filterOut(record)) { - runtimeMetrics.incrementFilter(filter); + privateRuntimeMetrics.incrementFilter(filter); filtered = true; break; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java index 0811e5e70..529b3ef17 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java @@ -100,15 +100,6 @@ public abstract class TraversalEngine,Provide // by default there's nothing to do } - /** - * Update the cumulative traversal metrics according to the data in this shard - * - * @param shard a non-null shard - */ - public void updateCumulativeMetrics(final Shard shard) { - updateCumulativeMetrics(shard.getReadMetrics()); - } - /** * Update the cumulative traversal metrics according to the data in this shard * diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java index b1e5b907f..cac93cb07 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java @@ -259,8 +259,6 @@ public final class TraverseActiveRegions extends TraversalEngine extends TraversalEngine extends TraversalEngine, final TraverseResults result = traverse( walker, locusView, referenceView, referenceOrderedDataView, sum ); sum = result.reduceResult; dataProvider.getShard().getReadMetrics().incrementNumIterations(result.numIterations); - updateCumulativeMetrics(dataProvider.getShard()); } // We have a final map call to execute here to clean up the skipped based from the diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java index aed88509e..764011a48 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java @@ -90,7 +90,6 @@ public class TraverseReadPairs extends TraversalEngine extends TraversalEngine, final Iterator aggregatedInputs = aggregateMapData(dataProvider); final T result = nanoScheduler.execute(aggregatedInputs, myMap, sum, myReduce); - updateCumulativeMetrics(dataProvider.getShard()); - return result; } diff --git a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java new file mode 100644 index 000000000..32fd35d95 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java @@ -0,0 +1,321 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.commandline.Tags; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.datasources.providers.LocusShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.providers.ShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; +import org.broadinstitute.sting.gatk.executive.WindowMaker; +import org.broadinstitute.sting.gatk.filters.ReadFilter; +import org.broadinstitute.sting.gatk.iterators.ReadTransformer; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.gatk.traversals.*; +import org.broadinstitute.sting.gatk.walkers.*; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.SampleUtils; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.sam.*; +import 
org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.util.*; + +public class ReadMetricsUnitTest extends BaseTest { + + @Test + public void testReadsSeenDoNotOverflowInt() { + + final ReadMetrics metrics = new ReadMetrics(); + + final long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L; + + for ( long i = 0L; i < moreThanMaxInt; i++ ) { + metrics.incrementNumReadsSeen(); + } + + Assert.assertEquals(metrics.getNumReadsSeen(), moreThanMaxInt); + Assert.assertTrue(metrics.getNumReadsSeen() > (long) Integer.MAX_VALUE); + + logger.warn(String.format("%d %d %d", Integer.MAX_VALUE, moreThanMaxInt, Long.MAX_VALUE)); + } + + + // Test the accuracy of the read metrics + + private IndexedFastaSequenceFile reference; + private SAMSequenceDictionary dictionary; + private SAMFileHeader header; + private GATKSAMReadGroupRecord readGroup; + private GenomeLocParser genomeLocParser; + private File testBAM; + + private static final int numReadsPerContig = 250000; + private static final List contigs = Arrays.asList("1", "2", "3"); + + @BeforeClass + private void init() throws IOException { + reference = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + dictionary = reference.getSequenceDictionary(); + genomeLocParser = new GenomeLocParser(dictionary); + header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test"); + header.setSequenceDictionary(dictionary); + header.setSortOrder(SAMFileHeader.SortOrder.coordinate); + readGroup = new GATKSAMReadGroupRecord(header.getReadGroup("test")); + + final List reads = new ArrayList<>(); + for ( final String contig : contigs ) { + for ( int i = 1; i <= numReadsPerContig; i++ ) { + reads.add(buildSAMRecord("read" + contig + "_" + i, contig, i)); + } + } + + createBAM(reads); + } + + private void createBAM(final List reads) throws IOException { + testBAM = 
File.createTempFile("TraverseActiveRegionsUnitTest", ".bam"); + testBAM.deleteOnExit(); + + SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM); + for (GATKSAMRecord read : reads ) { + out.addAlignment(read); + } + out.close(); + + new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit(); + new File(testBAM.getAbsolutePath() + ".bai").deleteOnExit(); + } + + // copied from LocusViewTemplate + protected GATKSAMRecord buildSAMRecord(final String readName, final String contig, final int alignmentStart) { + GATKSAMRecord record = new GATKSAMRecord(header); + + record.setReadName(readName); + record.setReferenceIndex(dictionary.getSequenceIndex(contig)); + record.setAlignmentStart(alignmentStart); + + record.setCigarString("1M"); + record.setReadString("A"); + record.setBaseQualityString("A"); + record.setReadGroup(readGroup); + + return record; + } + + @Test + public void testCountsFromReadTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + + final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); + final DummyReadWalker walker = new DummyReadWalker(); + traverseReadsNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { + final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), 
dataSource.seek(shard), reference, new ArrayList()); + traverseReadsNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void testCountsFromLocusTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); + + final TraverseLociNano traverseLociNano = new TraverseLociNano(1); + final DummyLocusWalker walker = new DummyLocusWalker(); + traverseLociNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new LocusShardBalancer()) ) { + final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples); + for ( WindowMaker.WindowMakerIterator window : windowMaker ) { + final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList()); + traverseLociNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + windowMaker.close(); + } + + //dataSource.close(); + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + 
Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void testCountsFromActiveRegionTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); + + final List intervals = new ArrayList<>(contigs.size()); + for ( final String contig : contigs ) + intervals.add(genomeLocParser.createGenomeLoc(contig, 1, numReadsPerContig)); + + final TraverseActiveRegions traverseActiveRegions = new TraverseActiveRegions(); + final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); + traverseActiveRegions.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer()) ) { + final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples); + for ( WindowMaker.WindowMakerIterator window : windowMaker ) { + final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList()); + traverseActiveRegions.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + windowMaker.close(); + } + traverseActiveRegions.endTraversal(walker, 0); + + 
Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + class DummyLocusWalker extends LocusWalker { + @Override + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + class DummyReadWalker extends ReadWalker { + @Override + public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + class DummyActiveRegionWalker extends ActiveRegionWalker { + @Override + public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + return new ActivityProfileState(ref.getLocus(), 0.0); + } + + @Override + public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } +} \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java index cf115cc76..8f5541c41 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java @@ -25,10 +25,11 @@ package org.broadinstitute.sting.gatk.walkers.qc; +import org.broadinstitute.sting.BaseTest; import org.testng.Assert; import org.testng.annotations.Test; -public class 
CountReadsUnitTest { +public class CountReadsUnitTest extends BaseTest { @Test public void testReadsDoNotOverflowInt() { @@ -45,5 +46,6 @@ public class CountReadsUnitTest { } Assert.assertEquals(sum.longValue(), moreThanMaxInt); + Assert.assertTrue(sum.longValue() > (long) Integer.MAX_VALUE); } } From d9cdc5d006e470fb5429c55efa1e74b0be68a4bc Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Tue, 21 May 2013 15:32:24 -0400 Subject: [PATCH 08/99] Optimization: track alleles in the PerReadAlleleLikelihoodMap with a HashSet -- The previous version of PerReadAlleleLikelihoodMap only stored the alleles in an ArrayList, and used ArrayList.contains() to determine if an allele was already present in the map. This is very slow with many alleles. Now keeps both the ArrayList (for get() performance) and a Set of alleles for contains(). --- .../genotyper/PerReadAlleleLikelihoodMap.java | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index 150e24c51..c8bb7ff79 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -42,13 +42,13 @@ import java.util.*; * For each read, this holds underlying alleles represented by an aligned read, and corresponding relative likelihood. 
*/ public class PerReadAlleleLikelihoodMap { - protected final List alleles; - protected final Map> likelihoodReadMap; + /** A set of all of the allele, so we can efficiently determine if an allele is already present */ + private final Set allelesSet = new HashSet<>(); + /** A list of the unique allele, as an ArrayList so we can call get(i) efficiently */ + protected final List alleles = new ArrayList<>(); + protected final Map> likelihoodReadMap = new LinkedHashMap<>(); - public PerReadAlleleLikelihoodMap() { - likelihoodReadMap = new LinkedHashMap>(); - alleles = new ArrayList(); - } + public PerReadAlleleLikelihoodMap() { } /** * Add a new entry into the Read -> ( Allele -> Likelihood ) map of maps. @@ -61,18 +61,20 @@ public class PerReadAlleleLikelihoodMap { if ( a == null ) throw new IllegalArgumentException("Cannot add a null allele to the allele likelihood map"); if ( likelihood == null ) throw new IllegalArgumentException("Likelihood cannot be null"); if ( likelihood > 0.0 ) throw new IllegalArgumentException("Likelihood must be negative (L = log(p))"); + Map likelihoodMap = likelihoodReadMap.get(read); if (likelihoodMap == null){ // LinkedHashMap will ensure iterating through alleles will be in consistent order - likelihoodMap = new LinkedHashMap(); + likelihoodMap = new LinkedHashMap<>(); } likelihoodReadMap.put(read,likelihoodMap); likelihoodMap.put(a,likelihood); - if (!alleles.contains(a)) + if (!allelesSet.contains(a)) { + allelesSet.add(a); alleles.add(a); - + } } public ReadBackedPileup createPerAlleleDownsampledBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) { @@ -165,6 +167,7 @@ public class PerReadAlleleLikelihoodMap { } public void clear() { + allelesSet.clear(); alleles.clear(); likelihoodReadMap.clear(); } From a1093ad230f1c24e4b9c2bf0e622d43ff2138e05 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Tue, 21 May 2013 15:35:43 -0400 Subject: [PATCH 09/99] Optimization for ActiveRegion.removeAll -- Previous version 
took a Collection to remove, and called ArrayList.removeAll() on this collection to remove reads from the ActiveRegion. This can be very slow when there are lots of reads, as ArrayList.removeAll ultimately calls indexOf() that searches through the list calling equals() on each element. New version takes a set, and uses an iterator on the list to remove() from the iterator any read that is in the set. Given that we were already iterating over the list of reads to update the read span, this algorithm is actually simpler and faster than the previous one. -- Update HaplotypeCaller filterReadsInRegion to use a Set not a List. -- Expanded the unit tests a bit for ActiveRegion.removeAll --- .../haplotypecaller/HaplotypeCaller.java | 11 ++++------- .../sting/utils/activeregion/ActiveRegion.java | 14 +++++++++----- .../activeregion/ActiveRegionUnitTest.java | 18 +++++++++++++++--- 3 files changed, 28 insertions(+), 15 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index fd8a1968b..24499def8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -678,7 +678,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In if (dontGenotype) return NO_CALLS; // user requested we not proceed // filter out reads from genotyping which fail mapping quality based criteria - final List filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); + final Collection filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); final Map> perSampleFilteredReadList = splitReadsBySample( filteredReads ); if( assemblyResult.regionForGenotyping.size() == 0 ) { return NO_CALLS; } // no reads remain after filtering so nothing else to do! 
@@ -918,17 +918,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In activeRegion.addAll(DownsamplingUtils.levelCoverageByPosition(ReadUtils.sortReadsByCoordinate(readsToUse), maxReadsInRegionPerSample, minReadsPerAlignmentStart)); } - private List filterNonPassingReads( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion ) { - final List readsToRemove = new ArrayList<>(); -// logger.info("Filtering non-passing regions: n incoming " + activeRegion.getReads().size()); + private Set filterNonPassingReads( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion ) { + final Set readsToRemove = new LinkedHashSet<>(); for( final GATKSAMRecord rec : activeRegion.getReads() ) { if( rec.getReadLength() < MIN_READ_LENGTH || rec.getMappingQuality() < 20 || BadMateFilter.hasBadMate(rec) || (keepRG != null && !rec.getReadGroup().getId().equals(keepRG)) ) { readsToRemove.add(rec); -// logger.info("\tremoving read " + rec + " len " + rec.getReadLength()); } } activeRegion.removeAll( readsToRemove ); -// logger.info("Filtered non-passing regions: n remaining " + activeRegion.getReads().size()); return readsToRemove; } @@ -938,7 +935,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In return getToolkit().getGenomeLocParser().createGenomeLoc(activeRegion.getExtendedLoc().getContig(), padLeft, padRight); } - private Map> splitReadsBySample( final List reads ) { + private Map> splitReadsBySample( final Collection reads ) { final Map> returnMap = new HashMap>(); for( final String sample : samplesList) { List readList = returnMap.get( sample ); diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java index 2f4c1b55d..7f2fe6833 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java @@ 
-336,13 +336,17 @@ public class ActiveRegion implements HasGenomeLocation { /** * Remove all of the reads in readsToRemove from this active region - * @param readsToRemove the collection of reads we want to remove + * @param readsToRemove the set of reads we want to remove */ - public void removeAll( final Collection readsToRemove ) { - reads.removeAll(readsToRemove); + public void removeAll( final Set readsToRemove ) { + final Iterator it = reads.iterator(); spanIncludingReads = extendedLoc; - for ( final GATKSAMRecord read : reads ) { - spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) ); + while ( it.hasNext() ) { + final GATKSAMRecord read = it.next(); + if ( readsToRemove.contains(read) ) + it.remove(); + else + spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) ); } } diff --git a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java index ad5fd3642..0f9b8531a 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java @@ -144,7 +144,7 @@ public class ActiveRegionUnitTest extends BaseTest { } @Test(enabled = !DEBUG, dataProvider = "ActiveRegionReads") - public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) { + public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) throws Exception { final GenomeLoc expectedSpan = loc.union(genomeLocParser.createGenomeLoc(read)); final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0); @@ -176,19 +176,31 @@ public class ActiveRegionUnitTest extends BaseTest { Assert.assertEquals(region.getReadSpanLoc(), expectedSpan); Assert.assertTrue(region.equalExceptReads(region2)); - region.removeAll(Collections.emptyList()); + 
region.removeAll(Collections.emptySet()); Assert.assertEquals(region.getReads(), Collections.singletonList(read)); Assert.assertEquals(region.size(), 1); Assert.assertEquals(region.getExtendedLoc(), loc); Assert.assertEquals(region.getReadSpanLoc(), expectedSpan); Assert.assertTrue(region.equalExceptReads(region2)); - region.removeAll(Collections.singletonList(read)); + region.removeAll(Collections.singleton(read)); Assert.assertEquals(region.getReads(), Collections.emptyList()); Assert.assertEquals(region.size(), 0); Assert.assertEquals(region.getExtendedLoc(), loc); Assert.assertEquals(region.getReadSpanLoc(), loc); Assert.assertTrue(region.equalExceptReads(region2)); + + final GATKSAMRecord read2 = (GATKSAMRecord)read.clone(); + read2.setReadName(read.getReadName() + ".clone"); + + for ( final GATKSAMRecord readToKeep : Arrays.asList(read, read2)) { + region.addAll(Arrays.asList(read, read2)); + final GATKSAMRecord readToDiscard = readToKeep == read ? read2 : read; + region.removeAll(Collections.singleton(readToDiscard)); + Assert.assertEquals(region.getReads(), Arrays.asList(readToKeep)); + Assert.assertEquals(region.size(), 1); + Assert.assertEquals(region.getExtendedLoc(), loc); + } } // ----------------------------------------------------------------------------------------------- From 010034a65024a3c6c2b89f8a68c34efae881c59a Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Tue, 21 May 2013 16:01:57 -0400 Subject: [PATCH 10/99] Optimization/bugfix for PerReadAlleleLikelihoodMap -- Add() call had a misplaced map.put call, so that we were always putting the result of get() back into the map, when what we really intended was to only put the value back in if the original get() resulted in a null and so initialized the result --- .../sting/utils/genotyper/PerReadAlleleLikelihoodMap.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java 
b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index c8bb7ff79..f253fc9c9 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -66,8 +66,8 @@ public class PerReadAlleleLikelihoodMap { if (likelihoodMap == null){ // LinkedHashMap will ensure iterating through alleles will be in consistent order likelihoodMap = new LinkedHashMap<>(); + likelihoodReadMap.put(read,likelihoodMap); } - likelihoodReadMap.put(read,likelihoodMap); likelihoodMap.put(a,likelihood); From 881b2b50abe450f5d28a3d26d5b2a63f826687e0 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Tue, 21 May 2013 18:19:23 -0400 Subject: [PATCH 11/99] Optimized counting of filtered records by filter. Don't map class to counts in the ReadMetrics (necessitating 2 HashMap lookups for every increment). Instead, wrap the ReadFilters with a counting version and then set those counts only when updating global metrics. --- .../sting/gatk/ReadMetrics.java | 23 +++----- .../filters/CountingFilteringIterator.java | 31 ++++++++--- .../sting/gatk/ReadMetricsUnitTest.java | 53 ++++++++++++++++++- 3 files changed, 84 insertions(+), 23 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java index f73e7ccd5..29372abcd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java +++ b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java @@ -42,7 +42,7 @@ public class ReadMetrics implements Cloneable { private long nReads; // keep track of filtered records by filter type (class) - private Map filterCounter = new HashMap<>(); + private Map filterCounter = new HashMap<>(); /** * Combines these metrics with a set of other metrics, storing the results in this class. 
@@ -51,9 +51,9 @@ public class ReadMetrics implements Cloneable { public synchronized void incrementMetrics(ReadMetrics metrics) { nRecords += metrics.nRecords; nReads += metrics.nReads; - for(Map.Entry counterEntry: metrics.filterCounter.entrySet()) { - Class counterType = counterEntry.getKey(); - long newValue = (filterCounter.containsKey(counterType) ? filterCounter.get(counterType) : 0) + counterEntry.getValue(); + for(Map.Entry counterEntry: metrics.filterCounter.entrySet()) { + final String counterType = counterEntry.getKey(); + final long newValue = (filterCounter.containsKey(counterType) ? filterCounter.get(counterType) : 0) + counterEntry.getValue(); filterCounter.put(counterType, newValue); } } @@ -78,21 +78,12 @@ public class ReadMetrics implements Cloneable { } - public void incrementFilter(SamRecordFilter filter) { - long c = 0; - if ( filterCounter.containsKey(filter.getClass()) ) { - c = filterCounter.get(filter.getClass()); - } - - filterCounter.put(filter.getClass(), c + 1L); + public void setFilterCount(final String filter, final long count) { + filterCounter.put(filter, count); } public Map getCountsByFilter() { - final TreeMap sortedCounts = new TreeMap<>(); - for(Map.Entry counterEntry: filterCounter.entrySet()) { - sortedCounts.put(counterEntry.getKey().getSimpleName(),counterEntry.getValue()); - } - return sortedCounts; + return new TreeMap<>(filterCounter); } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java index 6c926e3cf..1942fc19a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java @@ -31,9 +31,7 @@ import net.sf.samtools.util.CloseableIterator; import net.sf.samtools.util.CloserUtil; import org.broadinstitute.sting.gatk.ReadMetrics; -import java.util.Collection; 
-import java.util.Iterator; -import java.util.NoSuchElementException; +import java.util.*; /** * Filtering Iterator which takes a filter and an iterator and iterates @@ -44,9 +42,27 @@ public class CountingFilteringIterator implements CloseableIterator { private final ReadMetrics globalRuntimeMetrics; private final ReadMetrics privateRuntimeMetrics; private final Iterator iterator; - private final Collection filters; + private final List filters = new ArrayList<>(); private SAMRecord next = null; + // wrapper around ReadFilters to count the number of filtered reads + private final class CountingReadFilter extends ReadFilter { + protected final ReadFilter readFilter; + protected long counter = 0L; + + public CountingReadFilter(final ReadFilter readFilter) { + this.readFilter = readFilter; + } + + @Override + public boolean filterOut(final SAMRecord record) { + final boolean result = readFilter.filterOut(record); + if ( result ) + counter++; + return result; + } + } + /** * Constructor * @@ -58,7 +74,8 @@ public class CountingFilteringIterator implements CloseableIterator { this.globalRuntimeMetrics = metrics; privateRuntimeMetrics = new ReadMetrics(); this.iterator = iterator; - this.filters = filters; + for ( final ReadFilter filter : filters ) + this.filters.add(new CountingReadFilter(filter)); next = getNextRecord(); } @@ -97,8 +114,11 @@ public class CountingFilteringIterator implements CloseableIterator { public void close() { CloserUtil.close(iterator); + // update the global metrics with all the data we collected here globalRuntimeMetrics.incrementMetrics(privateRuntimeMetrics); + for ( final CountingReadFilter filter : filters ) + globalRuntimeMetrics.setFilterCount(filter.readFilter.getClass().getSimpleName(), filter.counter); } /** @@ -117,7 +137,6 @@ public class CountingFilteringIterator implements CloseableIterator { boolean filtered = false; for(SamRecordFilter filter: filters) { if(filter.filterOut(record)) { - 
privateRuntimeMetrics.incrementFilter(filter); filtered = true; break; } diff --git a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java index 32fd35d95..3225a128c 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java @@ -34,7 +34,6 @@ import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.datasources.providers.LocusShardDataProvider; import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; -import org.broadinstitute.sting.gatk.datasources.providers.ShardDataProvider; import org.broadinstitute.sting.gatk.datasources.reads.*; import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.executive.WindowMaker; @@ -263,6 +262,43 @@ public class ReadMetricsUnitTest extends BaseTest { Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); } + @Test + public void testFilteredCounts() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final List filters = new ArrayList<>(); + filters.add(new EveryTenthReadFilter()); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + filters, + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + + final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); + final DummyReadWalker 
walker = new DummyReadWalker(); + traverseReadsNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { + final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList()); + traverseReadsNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + + Assert.assertEquals((long)engine.getCumulativeMetrics().getCountsByFilter().get(EveryTenthReadFilter.class.getSimpleName()), contigs.size() * numReadsPerContig / 10); + } + class DummyLocusWalker extends LocusWalker { @Override public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { @@ -318,4 +354,19 @@ public class ReadMetricsUnitTest extends BaseTest { return 0; } } + + private final class EveryTenthReadFilter extends ReadFilter { + + private int myCounter = 0; + + @Override + public boolean filterOut(final SAMRecord record) { + if ( ++myCounter == 10 ) { + myCounter = 0; + return true; + } + + return false; + } + } } \ No newline at end of file From d167743852085aaef28d3d202e97805263a14b53 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 10 May 2013 11:09:53 -0400 Subject: [PATCH 12/99] Archived banded logless PairHMM BandedHMM --------- -- An implementation of a linear runtime, linear memory usage banded logless PairHMM. Thought about 50% faster than current PairHMM, this implementation will be superseded by the GraphHMM when it becomes available.
The implementation is being archived for future reference Useful infrastructure changes ----------------------------- -- Split PairHMM into a N2MemoryPairHMM that allows smarter implementation to not allocate the double[][] matrices if they don't want, which was previously occurring in the base class PairHMM -- Added functionality (controlled by private static boolean) to write out likelihood call information to a file from inside of LikelihoodCalculationEngine for using in unit or performance testing. Added example of 100kb of data to private/testdata. Can be easily read in with the PairHMMTestData class. -- PairHMM now tracks the number of possible cell evaluations, and the LoglessCachingPairHMM updates the nCellsEvaluated so we can see how many cells are saved by the caching calculation. --- .../haplotypecaller/HaplotypeCaller.java | 1 + .../LikelihoodCalculationEngine.java | 47 ++++- .../sting/utils/pairhmm/LoglessPairHMM.java | 16 +- .../sting/utils/pairhmm/PairHMMTestData.java | 162 ++++++++++++++++++ .../sting/utils/pairhmm/Log10PairHMM.java | 2 +- .../sting/utils/pairhmm/N2MemoryPairHMM.java | 91 ++++++++++ .../sting/utils/pairhmm/PairHMM.java | 58 +------ 7 files changed, 316 insertions(+), 61 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java create mode 100644 public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 24499def8..2ebfbcee9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -867,6 +867,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Override public void onTraversalDone(Integer result) 
{ + likelihoodCalculationEngine.close(); logger.info("Ran local assembly on " + result + " active regions"); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index d5d5f3c09..ca1877142 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -48,20 +48,27 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; +import net.sf.samtools.SAMUtils; import org.apache.log4j.Logger; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.haplotype.HaplotypeScoreComparator; -import org.broadinstitute.sting.utils.pairhmm.*; +import org.broadinstitute.sting.utils.pairhmm.Log10PairHMM; +import org.broadinstitute.sting.utils.pairhmm.LoglessPairHMM; +import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.variantcontext.Allele; +import java.io.File; +import 
java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.PrintStream; import java.util.*; public class LikelihoodCalculationEngine { @@ -71,6 +78,7 @@ public class LikelihoodCalculationEngine { private final byte constantGCP; private final double log10globalReadMismappingRate; private final boolean DEBUG; + private final PairHMM.HMM_IMPLEMENTATION hmmType; private final ThreadLocal pairHMM = new ThreadLocal() { @@ -86,6 +94,10 @@ public class LikelihoodCalculationEngine { } }; + private final static boolean WRITE_LIKELIHOODS_TO_FILE = false; + private final static String LIKELIHOODS_FILENAME = "likelihoods.txt"; + private final PrintStream likelihoodsStream; + /** * The expected rate of random sequencing errors for a read originating from its true haplotype. * @@ -113,12 +125,28 @@ public class LikelihoodCalculationEngine { this.constantGCP = constantGCP; this.DEBUG = debug; this.log10globalReadMismappingRate = log10globalReadMismappingRate; + + if ( WRITE_LIKELIHOODS_TO_FILE ) { + try { + likelihoodsStream = new PrintStream(new FileOutputStream(new File(LIKELIHOODS_FILENAME))); + } catch ( FileNotFoundException e ) { + throw new RuntimeException(e); + } + } else { + likelihoodsStream = null; + } } public LikelihoodCalculationEngine() { this((byte)10, false, PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING, -3); } + public void close() { + if ( likelihoodsStream != null ) likelihoodsStream.close(); + } + + + /** * Initialize our pairHMM with parameters appropriate to the haplotypes and reads we're going to evaluate * @@ -205,6 +233,17 @@ public class LikelihoodCalculationEngine { final double log10l = pairHMM.get().computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(), read.getReadBases(), readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype); + if ( WRITE_LIKELIHOODS_TO_FILE ) { + likelihoodsStream.printf("%s %s %s %s %s %s %f%n", + haplotype.getBaseString(), + new String(read.getReadBases()), + 
SAMUtils.phredToFastq(readQuals), + SAMUtils.phredToFastq(readInsQuals), + SAMUtils.phredToFastq(readDelQuals), + SAMUtils.phredToFastq(overallGCP), + log10l); + } + if ( haplotype.isNonReference() ) bestNonReflog10L = Math.max(bestNonReflog10L, log10l); else diff --git a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java index ab2a5bb2a..184a2689d 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java +++ b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java @@ -55,7 +55,7 @@ import org.broadinstitute.sting.utils.QualityUtils; * User: rpoplin, carneiro * Date: 10/16/12 */ -public final class LoglessPairHMM extends PairHMM { +public final class LoglessPairHMM extends N2MemoryPairHMM { protected static final double INITIAL_CONDITION = Math.pow(2, 1020); protected static final double INITIAL_CONDITION_LOG10 = Math.log10(INITIAL_CONDITION); @@ -99,8 +99,13 @@ public final class LoglessPairHMM extends PairHMM { } } - if ( ! constantsAreInitialized || recacheReadValues ) - initializeProbabilities(insertionGOP, deletionGOP, overallGCP); + if ( ! 
constantsAreInitialized || recacheReadValues ) { + initializeProbabilities(transition, insertionGOP, deletionGOP, overallGCP); + + // note that we initialized the constants + constantsAreInitialized = true; + } + initializePriors(haplotypeBases, readBases, readQuals, hapStartIndex); for (int i = 1; i < paddedReadLength; i++) { @@ -159,7 +164,7 @@ public final class LoglessPairHMM extends PairHMM { "overallGCP != null" }) @Ensures("constantsAreInitialized") - private void initializeProbabilities(final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) { + protected static void initializeProbabilities(final double[][] transition, final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) { for (int i = 0; i < insertionGOP.length; i++) { final int qualIndexGOP = Math.min(insertionGOP[i] + deletionGOP[i], Byte.MAX_VALUE); transition[i+1][matchToMatch] = QualityUtils.qualToProb((byte) qualIndexGOP); @@ -169,9 +174,6 @@ public final class LoglessPairHMM extends PairHMM { transition[i+1][matchToDeletion] = QualityUtils.qualToErrorProb(deletionGOP[i]); transition[i+1][deletionToDeletion] = QualityUtils.qualToErrorProb(overallGCP[i]); } - - // note that we initialized the constants - constantsAreInitialized = true; } /** diff --git a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java new file mode 100644 index 000000000..3d8137ecf --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java @@ -0,0 +1,162 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.pairhmm; + +import net.sf.samtools.SAMUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.text.XReadLines; + +import java.io.*; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.zip.GZIPInputStream; + +/** + * Useful single class carrying test data for PairHMMs (for use in benchmarking and unit tests) + * + * User: depristo + * Date: 5/12/13 + * Time: 3:52 PM + * To change this template use File | Settings | File Templates. 
+ */ +public class PairHMMTestData { + public final String ref; + private final String read; + public final byte[] baseQuals, insQuals, delQuals, gcp; + public final double log10l; + + PairHMMTestData(String ref, String read, byte[] baseQuals, byte[] insQuals, byte[] delQuals, byte[] gcp, double log10l) { + this.ref = ref; + this.read = read; + this.baseQuals = baseQuals; + this.insQuals = insQuals; + this.delQuals = delQuals; + this.gcp = gcp; + this.log10l = log10l; + } + + PairHMMTestData(String ref, String read, final byte qual) { + this.ref = ref; + this.read = read; + this.baseQuals = this.insQuals = this.delQuals = Utils.dupBytes(qual, read.length()); + this.gcp = Utils.dupBytes((byte)10, read.length()); + this.log10l = -1; + } + + public double runHMM(final PairHMM hmm) { + hmm.initialize(getRead().length(), ref.length()); + return hmm.computeReadLikelihoodGivenHaplotypeLog10(ref.getBytes(), getRead().getBytes(), + baseQuals, insQuals, delQuals, gcp, true); + } + + @Override + public String toString() { + return "Info{" + + "ref='" + ref + '\'' + + ", read='" + getRead() + '\'' + + ", log10l=" + log10l + + '}'; + } + + public static void runHMMs(final PairHMM hmm, final List data, final boolean runSingly) { + if ( runSingly ) { + for ( final PairHMMTestData datum : data ) + datum.runHMM(hmm); + } else { + // running in batch mode + final PairHMMTestData first = data.get(0); + int maxHaplotypeLen = calcMaxHaplotypeLen(data); + hmm.initialize(first.getRead().length(), maxHaplotypeLen); + for ( final PairHMMTestData datum : data ) { + hmm.computeReadLikelihoodGivenHaplotypeLog10(datum.ref.getBytes(), datum.getRead().getBytes(), + datum.baseQuals, datum.insQuals, datum.delQuals, datum.gcp, false); + + } + } + } + + public static int calcMaxHaplotypeLen(final List data) { + int maxHaplotypeLen = 0; + for ( final PairHMMTestData datum : data ) + maxHaplotypeLen = Math.max(maxHaplotypeLen, datum.ref.length()); + return maxHaplotypeLen; + } + + public static Map> 
readLikelihoods(final File file) throws IOException { + final Map> results = new LinkedHashMap<>(); + + InputStream in = new FileInputStream(file); + if ( file.getName().endsWith(".gz") ) { + in = new GZIPInputStream(in); + } + + for ( final String line : new XReadLines(in) ) { + final String[] parts = line.split(" "); + final PairHMMTestData info = new PairHMMTestData( + parts[0], parts[1], + SAMUtils.fastqToPhred(parts[2]), + SAMUtils.fastqToPhred(parts[3]), + SAMUtils.fastqToPhred(parts[4]), + SAMUtils.fastqToPhred(parts[5]), + Double.parseDouble(parts[6])); + + if ( ! results.containsKey(info.read) ) { + results.put(info.read, new LinkedList()); + } + final List byHap = results.get(info.read); + byHap.add(info); + } + + return results; + } + + public String getRead() { + return read; + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java index ab6c321e8..ddc1a4559 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java @@ -38,7 +38,7 @@ import java.util.Arrays; * User: rpoplin, carneiro * Date: 3/1/12 */ -public final class Log10PairHMM extends PairHMM { +public final class Log10PairHMM extends N2MemoryPairHMM { /** * Should we use exact log10 calculation (true), or an approximation (false)? 
*/ diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java new file mode 100644 index 000000000..a091a0716 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java @@ -0,0 +1,91 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.utils.pairhmm; + +import com.google.java.contract.Requires; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.MathUtils; + +import java.util.Arrays; + +/** + * Superclass for PairHMM that want to use a full read x haplotype matrix for their match, insertion, and deletion matrix + * + * User: rpoplin + * Date: 10/16/12 + */ +abstract class N2MemoryPairHMM extends PairHMM { + protected double[][] transition = null; // The transition probabilities cache + protected double[][] prior = null; // The prior probabilities cache + protected double[][] matchMatrix = null; + protected double[][] insertionMatrix = null; + protected double[][] deletionMatrix = null; + + /** + * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths + * + * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding. + * + * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM + * @param readMaxLength the max length of reads we want to use with this PairHMM + */ + public void initialize( final int readMaxLength, final int haplotypeMaxLength ) { + super.initialize(readMaxLength, haplotypeMaxLength); + + matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + } + + /** + * Print out the core hmm matrices for debugging + */ + protected void dumpMatrices() { + dumpMatrix("matchMetricArray", matchMatrix); + dumpMatrix("insertionMatrix", insertionMatrix); + dumpMatrix("deletionMatrix", deletionMatrix); + } + + /** + * Print out in a human readable form the matrix for debugging + * @param name the name of this matrix + * @param matrix the matrix of values + */ + @Requires({"name != null", "matrix != null"}) + private void 
dumpMatrix(final String name, final double[][] matrix) { + System.out.printf("%s%n", name); + for ( int i = 0; i < matrix.length; i++) { + System.out.printf("\t%s[%d]", name, i); + for ( int j = 0; j < matrix[i].length; j++ ) { + if ( Double.isInfinite(matrix[i][j]) ) + System.out.printf(" %15s", String.format("%f", matrix[i][j])); + else + System.out.printf(" % 15.5e", matrix[i][j]); + } + System.out.println(); + } + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java index 6b57a1354..85ac97f95 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java @@ -40,8 +40,6 @@ import java.util.Arrays; public abstract class PairHMM { protected final static Logger logger = Logger.getLogger(PairHMM.class); - protected double[][] transition = null; // The transition probabilities cache - protected double[][] prior = null; // The prior probabilities cache protected boolean constantsAreInitialized = false; protected byte[] previousHaplotypeBases; @@ -52,12 +50,9 @@ public abstract class PairHMM { /* PairHMM as implemented for the UnifiedGenotyper. 
Uses log10 sum functions accurate to only 1E-4 */ ORIGINAL, /* Optimized version of the PairHMM which caches per-read computations and operations in real space to avoid costly sums of log10'ed likelihoods */ - LOGLESS_CACHING + LOGLESS_CACHING, } - protected double[][] matchMatrix = null; - protected double[][] insertionMatrix = null; - protected double[][] deletionMatrix = null; protected int maxHaplotypeLength, maxReadLength; protected int paddedMaxReadLength, paddedMaxHaplotypeLength; protected int paddedReadLength, paddedHaplotypeLength; @@ -82,18 +77,12 @@ public abstract class PairHMM { paddedMaxReadLength = readMaxLength + 1; paddedMaxHaplotypeLength = haplotypeMaxLength + 1; - matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - previousHaplotypeBases = null; constantsAreInitialized = false; initialized = true; } - - /** * Compute the total probability of read arising from haplotypeBases given base substitution, insertion, and deletion * probabilities. 
@@ -152,44 +141,15 @@ public abstract class PairHMM { * To be overloaded by subclasses to actually do calculation for #computeReadLikelihoodGivenHaplotypeLog10 */ @Requires({"readBases.length == readQuals.length", "readBases.length == insertionGOP.length", "readBases.length == deletionGOP.length", - "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"}) + "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"}) protected abstract double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases, - final byte[] readBases, - final byte[] readQuals, - final byte[] insertionGOP, - final byte[] deletionGOP, - final byte[] overallGCP, - final int hapStartIndex, - final boolean recacheReadValues ); - - /** - * Print out the core hmm matrices for debugging - */ - protected void dumpMatrices() { - dumpMatrix("matchMetricArray", matchMatrix); - dumpMatrix("insertionMatrix", insertionMatrix); - dumpMatrix("deletionMatrix", deletionMatrix); - } - - /** - * Print out in a human readable form the matrix for debugging - * @param name the name of this matrix - * @param matrix the matrix of values - */ - @Requires({"name != null", "matrix != null"}) - private void dumpMatrix(final String name, final double[][] matrix) { - System.out.printf("%s%n", name); - for ( int i = 0; i < matrix.length; i++) { - System.out.printf("\t%s[%d]", name, i); - for ( int j = 0; j < matrix[i].length; j++ ) { - if ( Double.isInfinite(matrix[i][j]) ) - System.out.printf(" %15s", String.format("%f", matrix[i][j])); - else - System.out.printf(" % 15.5e", matrix[i][j]); - } - System.out.println(); - } - } + final byte[] readBases, + final byte[] readQuals, + final byte[] insertionGOP, + final byte[] deletionGOP, + final byte[] overallGCP, + final int hapStartIndex, + final boolean recacheReadValues ); /** * Compute the first position at which two haplotypes differ From 
da21924b44342321150d1f05ee9f6850969ffb44 Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Wed, 22 May 2013 14:22:54 -0400 Subject: [PATCH 13/99] Make the missing targets output never use stdout Problem -------- Diagnose Targets is outputting missing intervals to stdout if the argument -missing is not provided Solution -------- Make it NOT default to stdout [Delivers #50386741] --- .../gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java index ebe2192b4..a6cbc1da3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java @@ -116,7 +116,7 @@ final class ThresHolder { @Argument(fullName = "quality_status_threshold", shortName = "stQ", doc = "The proportion of the loci needed for calling POOR_QUALITY", required = false) public double qualityStatusThreshold = 0.50; - @Output(fullName = "missing_intervals", shortName = "missing", doc ="Produces a file with the intervals that don't pass filters", required = false) + @Output(fullName = "missing_intervals", shortName = "missing", defaultToStdout = false, doc ="Produces a file with the intervals that don't pass filters", required = false) public PrintStream missingTargets = null; public final List locusMetricList = new LinkedList(); From 85905dba9238a0f558d939d66e23eaca758ccf1d Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Thu, 23 May 2013 15:15:56 -0400 Subject: [PATCH 14/99] Bugfix for GGA mode in UG silently ignoring indels -- Started by Mark. Finished up by Ryan. 
-- GGA mode still respected glm argument for SNP and INDEL models, so that you would silently fail to genotype indels at all if the -glm INDEL wasn't provided, but you'd still emit the sites, so you'd see records in the VCF but all alleles would be no calls. -- https://www.pivotaltracker.com/story/show/48924339 for more information -- [resolves #48924339] --- .../genotyper/UnifiedGenotyperEngine.java | 38 +++++++++---------- ...perGeneralPloidySuite1IntegrationTest.java | 2 +- 2 files changed, 18 insertions(+), 22 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java index 3380efcc9..fc11706e5 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java @@ -83,6 +83,9 @@ public class UnifiedGenotyperEngine { public static final double HUMAN_SNP_HETEROZYGOSITY = 1e-3; public static final double HUMAN_INDEL_HETEROZYGOSITY = 1e-4; + private static final int SNP_MODEL = 0; + private static final int INDEL_MODEL = 1; + public enum OUTPUT_MODE { /** produces calls only at variant sites */ EMIT_VARIANTS_ONLY, @@ -693,13 +696,13 @@ public class UnifiedGenotyperEngine { } private void determineGLModelsToUse() { - String modelPrefix = ""; if ( !UAC.GLmodel.name().contains(GPSTRING) && UAC.samplePloidy != GATKVariantContextUtils.DEFAULT_PLOIDY ) modelPrefix = GPSTRING; - if ( UAC.GLmodel.name().toUpperCase().contains("BOTH") ) { - modelPrefix += UAC.GLmodel.name().toUpperCase().replaceAll("BOTH",""); + // GGA mode => must initialize both the SNP and indel models + if ( UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || + UAC.GLmodel.name().toUpperCase().contains("BOTH") ) { 
modelsToUse.add(GenotypeLikelihoodsCalculationModel.Model.valueOf(modelPrefix+"SNP")); modelsToUse.add(GenotypeLikelihoodsCalculationModel.Model.valueOf(modelPrefix+"INDEL")); } @@ -712,31 +715,24 @@ public class UnifiedGenotyperEngine { private List getGLModelsToUse(final RefMetaDataTracker tracker, final ReferenceContext refContext, final AlignmentContext rawContext) { - if ( UAC.GenotypingMode != GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) return modelsToUse; + if ( modelsToUse.size() != 2 ) + throw new IllegalStateException("GGA mode assumes that we have initialized both the SNP and indel models but found " + modelsToUse); + // if we're genotyping given alleles then we need to choose the model corresponding to the variant type requested - final List GGAmodel = new ArrayList(1); final VariantContext vcInput = getVCFromAllelesRod(tracker, refContext, rawContext.getLocation(), false, logger, UAC.alleles); - if ( vcInput == null ) - return GGAmodel; // no work to be done - if ( vcInput.isSNP() ) { - // use the SNP model unless the user chose INDEL mode only - if ( modelsToUse.size() == 2 || modelsToUse.get(0).name().endsWith("SNP") ) - GGAmodel.add(modelsToUse.get(0)); + if ( vcInput == null ) { + return Collections.emptyList(); // no work to be done + } else if ( vcInput.isSNP() ) { + return Collections.singletonList(modelsToUse.get(SNP_MODEL)); + } else if ( vcInput.isIndel() || vcInput.isMixed() ) { + return Collections.singletonList(modelsToUse.get(INDEL_MODEL)); + } else { + return Collections.emptyList(); // No support for other types yet } - else if ( vcInput.isIndel() || vcInput.isMixed() ) { - // use the INDEL model unless the user chose SNP mode only - if ( modelsToUse.size() == 2 ) - GGAmodel.add(modelsToUse.get(1)); - else if ( modelsToUse.get(0).name().endsWith("INDEL") ) - GGAmodel.add(modelsToUse.get(0)); - } - // No support for other types yet - - return GGAmodel; } /** diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java index 88506fda3..1cfc41a27 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java @@ -74,7 +74,7 @@ public class UnifiedGenotyperGeneralPloidySuite1IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_GGA_Pools() { - executor.PC_LSV_Test(String.format(" -maxAltAlleles 1 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_INDEL_GGA", "INDEL", "3f7d763c654f1d708323f369ea4a099b"); + executor.PC_LSV_Test(String.format(" -maxAltAlleles 1 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_INDEL_GGA", "INDEL", "ceb105e3db0f2b993e3d725b0d60b6a3"); } @Test(enabled = true) From f1affa9fbb061720a7b67d2e26083006f206aeb7 Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Tue, 28 May 2013 14:58:50 -0400 Subject: [PATCH 15/99] Turn off downsampling for DiagnoseTargets Diagnose targets should never be downsampled. 
(and I didn't know there was a default downsampling going on for locus walkers) --- .../walkers/diagnostics/diagnosetargets/DiagnoseTargets.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java index 4bd08294b..bde324e3c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java @@ -52,6 +52,7 @@ import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.GenomeLoc; @@ -110,6 +111,7 @@ import java.util.*; @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) @By(value = DataSource.READS) @PartitionBy(PartitionType.INTERVAL) +@Downsample(by = DownsampleType.NONE) public class DiagnoseTargets extends LocusWalker { private static final String AVG_INTERVAL_DP_KEY = "IDP"; From 38e765f00d340bc600fa6645b29f69b5551fc445 Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Tue, 28 May 2013 15:29:43 -0400 Subject: [PATCH 16/99] Somehow the index of exampleDBSNP.vcf was missing This was missed when we added all the indices of our testdata --- public/testdata/exampleDBSNP.vcf.idx | Bin 0 -> 330 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 public/testdata/exampleDBSNP.vcf.idx diff --git a/public/testdata/exampleDBSNP.vcf.idx 
b/public/testdata/exampleDBSNP.vcf.idx new file mode 100644 index 0000000000000000000000000000000000000000..7239e366f87c568e1698e7959710af9af88be441 GIT binary patch literal 330 zcmZ9I-Acni5QRrXe4A|cC)q@=Vm4|(Q)#!wyReC?DaIyjQt6HF;FV7x_(muCKZs8cdCz)cB0Hn_*N{t5LtL+I5Xa zJ&X@(R7mgCOW$g7u_*4m*ZL8@-!0Fo`TSzc@tsa=U1o_~&a?6+E3V{qd7C8#P%jLk zI0`WjFb;^%`G4C&ic(}Nz@9ZQ&U_MJ!f70Y;0Wb`x=QAGQlyzR62?eGsPA)1V@%l} tGMz082?TQxc`DE9#$dp{6S$!J9p0vooVwUnKhxOvkMn71%4r@>{RLQ8OlJT9 literal 0 HcmV?d00001 From a7cb599945889d9011e5d71d9ffcee94c4bba5ee Mon Sep 17 00:00:00 2001 From: David Roazen Date: Tue, 28 May 2013 16:52:28 -0400 Subject: [PATCH 17/99] Require a minimum dcov value of 200 for Locus and ActiveRegion walkers when downsampling to coverage -Throw a UserException if a Locus or ActiveRegion walker is run with -dcov < 200, since low dcov values can result in problematic downsampling artifacts for locus-based traversals. -Read-based traversals continue to have no minimum for -dcov, since dcov for read traversals controls the number of reads per alignment start position, and even a dcov value of 1 might be safe/desirable in some circumstances. -Also reorganize the global downsampling defaults so that they are specified as annotations to the Walker, LocusWalker, and ActiveRegionWalker classes rather than as constants in the DownsamplingMethod class. -The default downsampling settings have not been changed: they are still -dcov 1000 for Locus and ActiveRegion walkers, and -dt NONE for all other walkers. 
--- .../HaplotypeCallerIntegrationTest.java | 2 +- .../sting/gatk/GenomeAnalysisEngine.java | 3 +- .../gatk/downsampling/DownsamplingMethod.java | 35 ++++++--------- .../gatk/walkers/ActiveRegionWalker.java | 2 + .../sting/gatk/walkers/LocusWalker.java | 2 + .../sting/gatk/walkers/Walker.java | 2 + .../reads/DownsamplerBenchmark.java | 4 +- .../DownsamplingIntegrationTest.java | 44 +++++++++++++++++++ 8 files changed, 68 insertions(+), 26 deletions(-) create mode 100644 public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 2d4223e5c..91e80b45c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -101,7 +101,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerInsertionOnEdgeOfContig() { - HCTest(CEUTRIO_MT_TEST_BAM, "-dcov 90 -L MT:1-10", "7f1fb8f9587f64643f6612ef1dd6d4ae"); + HCTest(CEUTRIO_MT_TEST_BAM, "-L MT:1-10", "7f1fb8f9587f64643f6612ef1dd6d4ae"); } private void HCTestIndelQualityScores(String bam, String args, String md5) { diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index 314de29c7..3a8431dca 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -463,9 +463,8 @@ public class GenomeAnalysisEngine { DownsamplingMethod commandLineMethod = argCollection.getDownsamplingMethod(); DownsamplingMethod walkerMethod = 
WalkerManager.getDownsamplingMethod(walker); - DownsamplingMethod defaultMethod = DownsamplingMethod.getDefaultDownsamplingMethod(walker); - DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : (walkerMethod != null ? walkerMethod : defaultMethod); + DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : walkerMethod; method.checkCompatibilityWithWalker(walker); return method; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java index 5aa27608d..8e92b1ff3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java @@ -61,20 +61,10 @@ public class DownsamplingMethod { public static final DownsampleType DEFAULT_DOWNSAMPLING_TYPE = DownsampleType.BY_SAMPLE; /** - * Default target coverage for locus-based traversals + * Don't allow dcov values below this threshold for locus-based traversals (ie., Locus + * and ActiveRegion walkers), as they can result in problematic downsampling artifacts */ - public static final int DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_COVERAGE = 1000; - - /** - * Default downsampling method for locus-based traversals - */ - public static final DownsamplingMethod DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_METHOD = - new DownsamplingMethod(DEFAULT_DOWNSAMPLING_TYPE, DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_COVERAGE, null); - - /** - * Default downsampling method for read-based traversals - */ - public static final DownsamplingMethod DEFAULT_READ_TRAVERSAL_DOWNSAMPLING_METHOD = NONE; + public static final int MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS = 200; public DownsamplingMethod( DownsampleType type, Integer toCoverage, Double toFraction ) { @@ -118,6 +108,16 @@ public class DownsamplingMethod { if ( isLocusTraversal && type == DownsampleType.ALL_READS 
&& toCoverage != null ) { throw new UserException("Downsampling to coverage with the ALL_READS method for locus-based traversals (eg., LocusWalkers) is not currently supported (though it is supported for ReadWalkers)."); } + + // For locus traversals, ensure that the dcov value (if present) is not problematically low + if ( isLocusTraversal && type != DownsampleType.NONE && toCoverage != null && + toCoverage < MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS ) { + throw new UserException(String.format("Locus-based traversals (ie., Locus and ActiveRegion walkers) require " + + "a minimum -dcov value of %d when downsampling to coverage. Values less " + + "than this can produce problematic downsampling artifacts while providing " + + "only insignificant improvements in memory usage in most cases.", + MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS)); + } } public String toString() { @@ -139,13 +139,4 @@ public class DownsamplingMethod { return builder.toString(); } - - public static DownsamplingMethod getDefaultDownsamplingMethod( Walker walker ) { - if ( walker instanceof LocusWalker || walker instanceof ActiveRegionWalker ) { - return DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_METHOD; - } - else { - return DEFAULT_READ_TRAVERSAL_DOWNSAMPLING_METHOD; - } - } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java index 9595b8f42..962f81d0d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java @@ -31,6 +31,7 @@ import org.broad.tribble.Feature; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import 
org.broadinstitute.sting.gatk.filters.*; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.utils.GenomeLoc; @@ -57,6 +58,7 @@ import java.util.*; @PartitionBy(PartitionType.READ) @ActiveRegionTraversalParameters(extension=50,maxRegion=1500) @ReadFilters({UnmappedReadFilter.class, NotPrimaryAlignmentFilter.class, DuplicateReadFilter.class, FailsVendorQualityCheckFilter.class, MappingQualityUnavailableFilter.class}) +@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000) @RemoveProgramRecords public abstract class ActiveRegionWalker extends Walker { /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java index 788bf11f9..9997723b8 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.walkers; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.DuplicateReadFilter; import org.broadinstitute.sting.gatk.filters.FailsVendorQualityCheckFilter; import org.broadinstitute.sting.gatk.filters.NotPrimaryAlignmentFilter; @@ -44,6 +45,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; @Requires({DataSource.READS,DataSource.REFERENCE}) @PartitionBy(PartitionType.LOCUS) @ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class,DuplicateReadFilter.class,FailsVendorQualityCheckFilter.class}) +@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000) @RemoveProgramRecords public abstract class LocusWalker extends Walker { // Do we actually want to operate on the context? 
diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java index 522414c00..40485596d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java @@ -29,6 +29,7 @@ import net.sf.samtools.SAMSequenceDictionary; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.MalformedReadFilter; import org.broadinstitute.sting.gatk.iterators.ReadTransformer; import org.broadinstitute.sting.gatk.samples.Sample; @@ -50,6 +51,7 @@ import java.util.List; */ @ReadFilters(MalformedReadFilter.class) @PartitionBy(PartitionType.NONE) +@Downsample(by = DownsampleType.NONE) @BAQMode(QualityMode = BAQ.QualityMode.OVERWRITE_QUALS, ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT) @BQSRMode(ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT) @DocumentedGATKFeature(groupName = "Uncategorized", extraDocs = {CommandLineGATK.class}) diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java index 00389be97..25c71d570 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java @@ -26,7 +26,9 @@ package org.broadinstitute.sting.gatk.datasources.reads; import com.google.caliper.Param; +import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; import org.broadinstitute.sting.gatk.walkers.qc.CountLoci; /** @@ 
-86,7 +88,7 @@ public class DownsamplerBenchmark extends ReadProcessingBenchmark { }, PER_SAMPLE { @Override - DownsamplingMethod create() { return DownsamplingMethod.getDefaultDownsamplingMethod(new CountLoci()); } + DownsamplingMethod create() { return WalkerManager.getDownsamplingMethod(LocusWalker.class); } }; abstract DownsamplingMethod create(); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java new file mode 100644 index 000000000..85f9169da --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java @@ -0,0 +1,44 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.downsampling; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.Test; + +public class DownsamplingIntegrationTest extends WalkerTest { + + @Test + public void testDetectLowDcovValueWithLocusTraversal() { + final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T CountLoci -R " + publicTestDir + "exampleFASTA.fasta -I " + publicTestDir + "exampleBAM.bam -o %s " + + "-dcov " + (DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS - 1), + 1, + UserException.class + ); + executeTest("testDetectLowDcovValueWithLocusTraversal", spec); + } +} From eb206e9f716f73a7fe6b6a69d8fb02fa4a08b05b Mon Sep 17 00:00:00 2001 From: David Roazen Date: Wed, 29 May 2013 14:43:57 -0400 Subject: [PATCH 18/99] Fix confusing log output from the engine -ReadShardBalancer was printing out an extra "Loading BAM index data for next contig" message at traversal end, which was confusing users and making the GATK look stupid. Suppress the extraneous message, and reword the log messages to be less confusing. -Improve log message output when initializing the shard iterator in GenomeAnalysisEngine. Don't mention BAMs when there are none, and say "Preparing for traversal" rather than mentioning the meaningless-for-users concept of "shard strategy" -These log messages are needed because the operations they surround might take a while under some circumstances, and the user should know that the GATK is actively doing something rather than being hung.
--- .../broadinstitute/sting/gatk/GenomeAnalysisEngine.java | 6 ++++-- .../sting/gatk/datasources/reads/ReadShardBalancer.java | 8 +++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index 3a8431dca..de7439b85 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -293,9 +293,11 @@ public class GenomeAnalysisEngine { // create the output streams initializeOutputStreams(microScheduler.getOutputTracker()); - logger.info("Creating shard strategy for " + readsDataSource.getReaderIDs().size() + " BAM files"); + // Initializing the shard iterator / BAM schedule might take some time, so let the user know vaguely what's going on + logger.info("Preparing for traversal" + + (readsDataSource.getReaderIDs().size() > 0 ? String.format(" over %d BAM files", readsDataSource.getReaderIDs().size()) : "")); Iterable shardStrategy = getShardStrategy(readsDataSource,microScheduler.getReference(),intervals); - logger.info("Done creating shard strategy"); + logger.info("Done preparing for traversal"); // execute the microscheduler, storing the results return microScheduler.execute(this.walker, shardStrategy); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java index 7772dbc1f..dc1b80efd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java @@ -177,7 +177,9 @@ public class ReadShardBalancer extends ShardBalancer { currentContigFilePointer = null; List nextContigFilePointers = new ArrayList(); - logger.info("Loading BAM index data for next 
contig"); + if ( filePointers.hasNext() ) { + logger.info("Loading BAM index data"); + } while ( filePointers.hasNext() ) { @@ -215,8 +217,8 @@ public class ReadShardBalancer extends ShardBalancer { } if ( currentContigFilePointer != null ) { - logger.info("Done loading BAM index data for next contig"); - logger.debug(String.format("Next contig FilePointer: %s", currentContigFilePointer)); + logger.info("Done loading BAM index data"); + logger.debug(String.format("Next FilePointer: %s", currentContigFilePointer)); } } From a5a68c09fac06d2ebc3ee6b9b94b31bded7cbfdd Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Wed, 29 May 2013 14:42:15 -0400 Subject: [PATCH 19/99] Fix for the "Removed too many insertions, header is now negative" bug in ReduceReads. The problem ultimately was that ReadUtils.readStartsWithInsertion() ignores leading hard/softclips, but ReduceReads does not. So I refactored that method to include a boolean argument as to whether or not clips should be ignored. Also rebased so that return type is no longer a Pair. Added unit test to cover this situation. 
--- .../reducereads/HeaderElement.java | 2 +- .../reducereads/SlidingWindow.java | 2 +- .../reducereads/SlidingWindowUnitTest.java | 19 ++++++++++ .../sting/utils/sam/ReadUtils.java | 35 ++++++++++--------- 4 files changed, 40 insertions(+), 18 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java index 38b9e957b..ba2c2ae56 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java @@ -207,7 +207,7 @@ public class HeaderElement { public void removeInsertionToTheRight() { this.insertionsToTheRight--; if (insertionsToTheRight < 0) - throw new ReviewedStingException("Removed too many insertions, header is now negative!"); + throw new ReviewedStingException("Removed too many insertions, header is now negative at position " + location); } public boolean hasInsertionToTheRight() { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java index 8843d6270..0425af3df 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java @@ -1199,7 +1199,7 @@ public class SlidingWindow { } // Special case for leading insertions before the beginning of the sliding read - if ( ReadUtils.readStartsWithInsertion(read).getFirst() && (readStart == headerStart || headerStart < 0) ) { + if ( (readStart == headerStart || headerStart < 0) && ReadUtils.readStartsWithInsertion(read.getCigar(), false) != null ) { // create a new first element to the window 
header with no bases added header.addFirst(new HeaderElement(readStart - 1)); // this allows the first element (I) to look at locationIndex - 1 when we update the header and do the right thing diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java index 56ad02084..c9bb2f084 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java @@ -89,6 +89,25 @@ public class SlidingWindowUnitTest extends BaseTest { return variantRegionBitset; } + ////////////////////////////////////////////////////////////////////////////////////// + //// Test for leading softclips immediately followed by an insertion in the CIGAR //// + ////////////////////////////////////////////////////////////////////////////////////// + + @Test(enabled = true) + public void testLeadingClipThenInsertion() { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 10); + read.setReadBases(Utils.dupBytes((byte) 'A', 10)); + read.setBaseQualities(Utils.dupBytes((byte)30, 10)); + read.setMappingQuality(30); + read.setCigarString("2S2I6M"); + + final SlidingWindow slidingWindow = new SlidingWindow("1", 0, 1); + slidingWindow.addRead(read); + Pair, CompressionStash> result = slidingWindow.close(null); + + } + ////////////////////////////////////////////////////////////////////////////////////// //// This section tests the findVariantRegions() method and related functionality //// ////////////////////////////////////////////////////////////////////////////////////// diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java index 
0db3aa043..5b15fdd1b 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java @@ -424,9 +424,9 @@ public class ReadUtils { // clipping the left tail and first base is insertion, go to the next read coordinate // with the same reference coordinate. Advance to the next cigar element, or to the // end of the read if there is no next element. - Pair firstElementIsInsertion = readStartsWithInsertion(cigar); - if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion.getFirst()) - readCoord = Math.min(firstElementIsInsertion.getSecond().getLength(), cigar.getReadLength() - 1); + final CigarElement firstElementIsInsertion = readStartsWithInsertion(cigar); + if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion != null) + readCoord = Math.min(firstElementIsInsertion.getLength(), cigar.getReadLength() - 1); return readCoord; } @@ -595,25 +595,28 @@ public class ReadUtils { } /** - * Checks if a read starts with an insertion. It looks beyond Hard and Soft clips - * if there are any. - * - * @param read - * @return A pair with the answer (true/false) and the element or null if it doesn't exist + * @see #readStartsWithInsertion(net.sf.samtools.Cigar, boolean) with ignoreClipOps set to true */ - public static Pair readStartsWithInsertion(GATKSAMRecord read) { - return readStartsWithInsertion(read.getCigar()); + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead) { + return readStartsWithInsertion(cigarForRead, true); } - public static Pair readStartsWithInsertion(final Cigar cigar) { - for (CigarElement cigarElement : cigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.INSERTION) - return new Pair(true, cigarElement); + /** + * Checks if a read starts with an insertion. 
+ * + * @param cigarForRead the CIGAR to evaluate + * @param ignoreClipOps should we ignore S and H operators when evaluating whether an I operator is at the beginning? + * @return the element if it's a leading insertion or null otherwise + */ + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreClipOps) { + for ( final CigarElement cigarElement : cigarForRead.getCigarElements() ) { + if ( cigarElement.getOperator() == CigarOperator.INSERTION ) + return cigarElement; - else if (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP) + else if ( !ignoreClipOps || (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP) ) break; } - return new Pair(false, null); + return null; } /** From 61af37d0d25a553eab872d86ebeed38927398ff3 Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Wed, 29 May 2013 16:17:43 -0400 Subject: [PATCH 20/99] Create a new normalDistributionLog10 function that is unit tested for use in the VQSR. 
--- .../GaussianMixtureModel.java | 5 +-- .../broadinstitute/sting/utils/MathUtils.java | 42 +++++++++++++++++-- .../activeregion/BandPassActivityProfile.java | 3 +- .../sting/utils/MathUtilsUnitTest.java | 17 ++++++++ .../activeregion/ActivityProfileUnitTest.java | 2 +- 5 files changed, 59 insertions(+), 10 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java index eef9da84a..92b0d4df2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.walkers.variantrecalibration; import Jama.Matrix; -import cern.jet.random.Normal; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.MathUtils; @@ -243,12 +242,10 @@ public class GaussianMixtureModel { public Double evaluateDatumInOneDimension( final VariantDatum datum, final int iii ) { if(datum.isNull[iii]) { return null; } - final Normal normal = new Normal(0.0, 1.0, null); final double[] pVarInGaussianLog10 = new double[gaussians.size()]; int gaussianIndex = 0; for( final MultivariateGaussian gaussian : gaussians ) { - normal.setState( gaussian.mu[iii], gaussian.sigma.get(iii, iii) ); - pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + Math.log10( normal.pdf( datum.annotations[iii] ) ); + pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + MathUtils.normalDistributionLog10(gaussian.mu[iii], gaussian.sigma.get(iii, iii), datum.annotations[iii]); } return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) } diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java 
b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index 38c131bc6..c8cf9d6a1 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -63,6 +63,8 @@ public class MathUtils { */ public final static double LOG10_P_OF_ZERO = -1000000.0; public final static double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); + private final static double NATURAL_LOG_OF_TEN = Math.log(10.0); + private final static double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI); static { log10Cache = new double[LOG10_CACHE_SIZE]; @@ -301,12 +303,46 @@ public class MathUtils { return 1; } - public static double NormalDistribution(final double mean, final double sd, final double x) { - double a = 1.0 / (sd * Math.sqrt(2.0 * Math.PI)); - double b = Math.exp(-1.0 * (Math.pow(x - mean, 2.0) / (2.0 * sd * sd))); + /** + * Calculate f(x) = Normal(x | mu = mean, sigma = sd) + * @param mean the desired mean of the Normal distribution + * @param sd the desired standard deviation of the Normal distribution + * @param x the value to evaluate + * @return a well-formed double + */ + public static double normalDistribution(final double mean, final double sd, final double x) { + final double a = 1.0 / (sd * SQUARE_ROOT_OF_TWO_TIMES_PI); + final double b = Math.exp(-1.0 * (square(x - mean) / (2.0 * square(sd)))); return a * b; } + /** + * Calculate f(x) = log10 ( Normal(x | mu = mean, sigma = sd) ) + * @param mean the desired mean of the Normal distribution + * @param sd the desired standard deviation of the Normal distribution + * @param x the value to evaluate + * @return a well-formed double + */ + + public static double normalDistributionLog10(final double mean, final double sd, final double x) { + if( sd < 0 ) + throw new IllegalArgumentException("sd: Standard deviation of normal must be >0"); + if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! 
wellFormedDouble(x) ) + throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)"); + final double a = -1.0 * Math.log10(sd * SQUARE_ROOT_OF_TWO_TIMES_PI); + final double b = -1.0 * (square(x - mean) / (2.0 * square(sd))) / NATURAL_LOG_OF_TEN; + return a + b; + } + + /** + * Calculate f(x) = x^2 + * @param x the value to square + * @return x * x + */ + public static double square(final double x) { + return x * x; + } + /** * Calculates the log10 of the binomial coefficient. Designed to prevent * overflows even with very large numbers. diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java index f2bc86dfc..f352bc332 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java @@ -31,7 +31,6 @@ import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocSortedSet; import org.broadinstitute.sting.utils.MathUtils; -import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; @@ -108,7 +107,7 @@ public class BandPassActivityProfile extends ActivityProfile { final int bandSize = 2 * filterSize + 1; final double[] kernel = new double[bandSize]; for( int iii = 0; iii < bandSize; iii++ ) { - kernel[iii] = MathUtils.NormalDistribution(filterSize, sigma, iii); + kernel[iii] = MathUtils.normalDistribution(filterSize, sigma, iii); } return MathUtils.normalizeFromRealSpace(kernel); } diff --git a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java index 27af8ec68..e4c74a0ad 100644 --- a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java +++ 
b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java @@ -25,6 +25,7 @@ package org.broadinstitute.sting.utils; +import cern.jet.random.Normal; import org.broadinstitute.sting.BaseTest; import org.testng.Assert; import org.testng.annotations.BeforeClass; @@ -398,4 +399,20 @@ public class MathUtilsUnitTest extends BaseTest { Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0,-3.0,2.0}, new double[]{6.0,7.0,8.0}),10.0,1e-3); Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0}, new double[]{6.0}),1.0,1e-3); } + + @Test + public void testNormalDistribution() { + final double requiredPrecision = 1E-10; + + final Normal n = new Normal(0.0, 1.0, null); + for( final double mu : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) { + for( final double sigma : new double[]{1.2, 3.0, 5.8977} ) { + for( final double x : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) { + n.setState(mu, sigma); + Assert.assertEquals(n.pdf(x), MathUtils.normalDistribution(mu, sigma, x), requiredPrecision); + Assert.assertEquals(Math.log10(n.pdf(x)), MathUtils.normalDistributionLog10(mu, sigma, x), requiredPrecision); + } + } + } + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java index 9be250b8e..f208815f7 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java @@ -450,7 +450,7 @@ public class ActivityProfileUnitTest extends BaseTest { private double[] makeGaussian(final int mean, final int range, final double sigma) { final double[] gauss = new double[range]; for( int iii = 0; iii < range; iii++ ) { - gauss[iii] = MathUtils.NormalDistribution(mean, sigma, iii) + ActivityProfile.ACTIVE_PROB_THRESHOLD; + gauss[iii] = MathUtils.normalDistribution(mean, sigma, 
iii) + ActivityProfile.ACTIVE_PROB_THRESHOLD; } return gauss; } From b16de45ce436fda30ef425f80b30e25a65c9f741 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 30 May 2013 16:53:23 -0400 Subject: [PATCH 21/99] Command-line read filters are now applied before Walker default filters -- This allows us to use -rf ReassignMappingQuality to reassign mapping qualities to 60 *before* the BQSR filters them out with MappingQualityUnassignedFilter. -- delivers #50222251 --- .../sting/gatk/GenomeAnalysisEngine.java | 9 +++- .../gatk/EngineFeaturesIntegrationTest.java | 48 +++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index de7439b85..6fa1b741c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -344,11 +344,18 @@ public class GenomeAnalysisEngine { * @return A collection of available filters. */ public Collection createFilters() { - final List filters = WalkerManager.getReadFilters(walker,this.getFilterManager()); + final List filters = new LinkedList<>(); + + // First add the user requested filters if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0) filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList)); for(final String filterName: this.getArguments().readFilters) filters.add(this.getFilterManager().createByName(filterName)); + + // now add the walker default filters. 
This ordering is critically important if + users need to apply filters that fix up reads that would be removed by default walker filters + filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager())); + return Collections.unmodifiableList(filters); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index 8d0874ea1..c60c6430c 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -26,12 +26,20 @@ package org.broadinstitute.sting.gatk; import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.ReadFilters; +import org.broadinstitute.sting.gatk.walkers.ReadWalker; import org.broadinstitute.sting.gatk.walkers.qc.ErrorThrowing; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.io.PrintStream; import java.util.Arrays; /** @@ -126,4 +134,44 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { executeTest(cfg.toString(), spec); } } + + // -------------------------------------------------------------------------------- + // + // Test that read filters are being applied in the order we expect + // + // -------------------------------------------------------------------------------- + + @ReadFilters({MappingQualityUnavailableFilter.class}) + public static class 
DummyReadWalkerWithMapqUnavailableFilter extends ReadWalker { + @Output + PrintStream out; + + @Override + public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { + return 1; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return value + sum; + } + + @Override + public void onTraversalDone(Integer result) { + out.println(result); + } + } + + @Test(enabled = true) + public void testUserReadFilterAppliedBeforeWalker() { + WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I " + privateTestDir + "allMAPQ255.bam" + + " -T DummyReadWalkerWithMapqUnavailableFilter -o %s -L MT -rf ReassignMappingQuality", + 1, Arrays.asList("ecf27a776cdfc771defab1c5d19de9ab")); + executeTest("testUserReadFilterAppliedBeforeWalker", spec); + } } \ No newline at end of file From 199476eae1431653a2d71f3552f7f89c6e7478af Mon Sep 17 00:00:00 2001 From: Chris Hartl Date: Thu, 30 May 2013 22:48:37 -0400 Subject: [PATCH 22/99] Three squashed commits: 1) Add in checks for input parameters in MathUtils method. I was careful to use the bottom-level methods whenever possible, so that parameters don't needlessly go through multiple checks (so for instance, the parameters n and k for a binomial aren't checked on log10binomial, but rather in the log10binomialcoefficient subroutine). This addresses JIRA GSA-767 Unit tests pass (we'll let bamboo deal with the integrations) 2) Address reviewer comments (change UserExceptions to IllegalArgumentExceptions). 3) .isWellFormedDouble() tests for infinity and not strictly positive infinity. Allow negative-infinity values for log10sumlog10 (as these just correspond to p=0). After these commits, unit and integration tests now pass, and GSA-767 is done. 
rebase and fix conflict: public/java/src/org/broadinstitute/sting/utils/MathUtils.java --- build.xml | 2 +- .../broadinstitute/sting/utils/MathUtils.java | 42 ++++++++++++++++--- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/build.xml b/build.xml index 2e9df4d5e..d9b37f4de 100644 --- a/build.xml +++ b/build.xml @@ -39,7 +39,7 @@ - + diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index c8cf9d6a1..49157a206 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -31,6 +31,7 @@ import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import java.lang.IllegalArgumentException; import java.math.BigDecimal; import java.util.*; @@ -205,15 +206,16 @@ public class MathUtils { } /** - * Converts a real space array of probabilities into a log10 array + * Converts a real space array of numbers (typically probabilities) into a log10 array * * @param prRealSpace * @return */ public static double[] toLog10(final double[] prRealSpace) { double[] log10s = new double[prRealSpace.length]; - for (int i = 0; i < prRealSpace.length; i++) + for (int i = 0; i < prRealSpace.length; i++) { log10s[i] = Math.log10(prRealSpace[i]); + } return log10s; } @@ -229,6 +231,9 @@ public class MathUtils { return maxValue; for (int i = start; i < finish; i++) { + if ( Double.isNaN(log10p[i]) || log10p[i] == Double.POSITIVE_INFINITY ) { + throw new IllegalArgumentException("log10p: Values must be non-infinite and non-NAN"); + } sum += Math.pow(10.0, log10p[i] - maxValue); } @@ -311,8 +316,12 @@ public class MathUtils { * @return a well-formed double */ public static double normalDistribution(final double mean, final double sd, final double x) { - final double a = 1.0 / 
(sd * SQUARE_ROOT_OF_TWO_TIMES_PI); - final double b = Math.exp(-1.0 * (square(x - mean) / (2.0 * square(sd)))); + if( sd < 0 ) + throw new IllegalArgumentException("sd: Standard deviation of normal must be >0"); + if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) ) + throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)"); + double a = 1.0 / (sd * Math.sqrt(2.0 * Math.PI)); + double b = Math.exp(-1.0 * (Math.pow(x - mean, 2.0) / (2.0 * sd * sd))); return a * b; } @@ -359,6 +368,13 @@ public class MathUtils { * @see #binomialCoefficient(int, int) with log10 applied to result */ public static double log10BinomialCoefficient(final int n, final int k) { + if ( n < 0 ) { + throw new IllegalArgumentException("n: Must have non-negative number of trials"); + } + if ( k > n || k < 0 ) { + throw new IllegalArgumentException("k: Must have non-negative number of successes, and no more successes than number of trials"); + } + return log10Factorial(n) - log10Factorial(k) - log10Factorial(n - k); } @@ -382,6 +398,8 @@ public class MathUtils { * @see #binomialProbability(int, int, double) with log10 applied to result */ public static double log10BinomialProbability(final int n, final int k, final double log10p) { + if ( log10p > 1e-18 ) + throw new IllegalArgumentException("log10p: Log-probability must be 0 or less"); double log10OneMinusP = Math.log10(1 - Math.pow(10, log10p)); return log10BinomialCoefficient(n, k) + log10p * k + log10OneMinusP * (n - k); } @@ -441,10 +459,20 @@ public class MathUtils { * @return */ public static double log10MultinomialCoefficient(final int n, final int[] k) { + if ( n < 0 ) + throw new IllegalArgumentException("n: Must have non-negative number of trials"); double denominator = 0.0; + int sum = 0; for (int x : k) { + if ( x < 0 ) + throw new IllegalArgumentException("x element of k: Must have non-negative observations of group"); + if ( x > n ) + throw new 
IllegalArgumentException("x element of k, n: Group observations must be bounded by k"); denominator += log10Factorial(x); + sum += x; } + if ( sum != n ) + throw new IllegalArgumentException("k and n: Sum of observations in multinomial must sum to total number of trials"); return log10Factorial(n) - denominator; } @@ -459,9 +487,11 @@ public class MathUtils { */ public static double log10MultinomialProbability(final int n, final int[] k, final double[] log10p) { if (log10p.length != k.length) - throw new UserException.BadArgumentValue("p and k", "Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length); + throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length); double log10Prod = 0.0; for (int i = 0; i < log10p.length; i++) { + if ( log10p[i] > 1e-18 ) + throw new IllegalArgumentException("log10p: Log-probability must be <= 0"); log10Prod += log10p[i] * k[i]; } return log10MultinomialCoefficient(n, k) + log10Prod; @@ -504,7 +534,7 @@ public class MathUtils { */ public static double multinomialProbability(final int[] k, final double[] p) { if (p.length != k.length) - throw new UserException.BadArgumentValue("p and k", "Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length); + throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length); int n = 0; double[] log10P = new double[p.length]; From b5b9d745a7b5b12927414e104b6259e25dd26508 Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Wed, 22 May 2013 10:35:19 -0400 Subject: [PATCH 23/99] New implementation of the GGA mode in the HaplotypeCaller -- We now inject the given alleles into the reference haplotype and add them to the graph. 
-- Those paths are read off of the graph and then evaluated with the appropriate marginalization for GGA mode. -- This unifies how Smith-Waterman is performed between discovery and GGA modes. -- Misc minor cleanup in several places. --- .../haplotypecaller/DeBruijnAssembler.java | 24 ++- .../haplotypecaller/GenotypingEngine.java | 103 +++---------- .../haplotypecaller/HaplotypeCaller.java | 38 +++-- .../LikelihoodCalculationEngine.java | 7 +- .../haplotypecaller/LocalAssemblyEngine.java | 143 +++++------------- .../readthreading/ReadThreadingAssembler.java | 9 +- .../readthreading/ReadThreadingGraph.java | 4 +- .../indels/PairHMMIndelErrorModel.java | 6 +- .../DeBruijnAssemblerUnitTest.java | 6 +- ...lexAndSymbolicVariantsIntegrationTest.java | 4 +- .../HaplotypeCallerIntegrationTest.java | 2 +- .../LocalAssemblyEngineUnitTest.java | 7 + .../ReadThreadingAssemblerUnitTest.java | 2 +- .../broadinstitute/sting/utils/MathUtils.java | 12 +- .../genotyper/PerReadAlleleLikelihoodMap.java | 3 +- .../sting/utils/haplotype/Haplotype.java | 34 +---- 16 files changed, 135 insertions(+), 269 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 48972dfd5..3c0642f83 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -77,6 +77,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { private final static int NUM_PATHS_PER_GRAPH = 25; private static final int KMER_OVERLAP = 5; // the additional size of a valid chunk of sequence, used to string together k-mers private static final int GRAPH_KMER_STEP = 6; + private static final int GGA_MODE_ARTIFICIAL_COUNTS = 1000; private final int minKmer; private final int 
onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms; @@ -92,8 +93,8 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } @Override - protected List assemble(final List reads, final Haplotype refHaplotype) { - final List graphs = new LinkedList(); + protected List assemble(final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { + final List graphs = new LinkedList<>(); final int maxKmer = ReadUtils.getMaxReadLength(reads) - KMER_OVERLAP - 1; if( maxKmer < minKmer) { @@ -106,7 +107,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { continue; if ( debug ) logger.info("Creating de Bruijn graph for " + kmer + " kmer using " + reads.size() + " reads"); - DeBruijnGraph graph = createGraphFromSequences( reads, kmer, refHaplotype); + DeBruijnGraph graph = createGraphFromSequences(reads, kmer, refHaplotype, activeAlleleHaplotypes); if( graph != null ) { // graphs that fail during creation ( for example, because there are cycles in the reference graph ) will show up here as a null graph object // do a series of steps to clean up the raw assembly graph to make it analysis-ready if ( debugGraphTransformations ) graph.printGraph(new File("unpruned.dot"), pruneFactor); @@ -133,7 +134,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } @Requires({"reads != null", "kmerLength > 0", "refHaplotype != null"}) - protected DeBruijnGraph createGraphFromSequences( final List reads, final int kmerLength, final Haplotype refHaplotype ) { + protected DeBruijnGraph createGraphFromSequences( final List reads, final int kmerLength, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { final DeBruijnGraph graph = new DeBruijnGraph(kmerLength); final DeBruijnGraphBuilder builder = new DeBruijnGraphBuilder(graph); @@ -142,8 +143,8 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { // something went wrong, so abort right now with a null graph return null; - // now go through the graph already 
seeded with the reference sequence and add the read kmers to it - if ( ! addReadKmersToGraph(builder, reads) ) + // now go through the graph already seeded with the reference sequence and add the read kmers to it as well as the artificial GGA haplotypes + if ( ! addReadKmersToGraph(builder, reads, activeAlleleHaplotypes) ) // some problem was detected adding the reads to the graph, return null to indicate we failed return null; @@ -156,11 +157,20 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { * * @param builder a debruijn graph builder to add the read kmers to * @param reads a non-null list of reads whose kmers we want to add to the graph + * @param activeAlleleHaplotypes a list of haplotypes to add to the graph for GGA mode * @return true if we successfully added the read kmers to the graph without corrupting it in some way */ - protected boolean addReadKmersToGraph(final DeBruijnGraphBuilder builder, final List reads) { + protected boolean addReadKmersToGraph(final DeBruijnGraphBuilder builder, final List reads, final List activeAlleleHaplotypes) { final int kmerLength = builder.getKmerSize(); + // First pull kmers out of the artificial GGA haplotypes and throw them on the graph + for( final Haplotype haplotype : activeAlleleHaplotypes ) { + final int end = haplotype.length() - kmerLength; + for( int start = 0; start < end; start++ ) { + builder.addKmerPairFromSeqToGraph( haplotype.getBases(), start, GGA_MODE_ARTIFICIAL_COUNTS ); + } + } + // Next pull kmers out of every read and throw them on the graph for( final GATKSAMRecord read : reads ) { final byte[] sequence = read.getReadBases(); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 419ea378f..9bb456230 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -71,7 +71,7 @@ public class GenotypingEngine { private final boolean DEBUG; private final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; - private final static List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied + private final static List noCall = new ArrayList<>(); // used to noCall all genotypes until the exact model is applied private final VariantAnnotatorEngine annotationEngine; private final MergeVariantsAcrossHaplotypes crossHaplotypeEventMerger; @@ -162,8 +162,8 @@ public class GenotypingEngine { final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); // Walk along each position in the key set and create each event to be outputted - final Set calledHaplotypes = new HashSet(); - final List returnCalls = new ArrayList(); + final Set calledHaplotypes = new HashSet<>(); + final List returnCalls = new ArrayList<>(); for( final int loc : startPosKeySet ) { if( loc >= activeRegionWindow.getStart() && loc <= activeRegionWindow.getStop() ) { // genotyping an event inside this active region final List eventsAtThisLoc = getVCsAtThisLocation(haplotypes, loc, activeAllelesToGenotype); @@ -183,7 +183,7 @@ public class GenotypingEngine { if( eventsAtThisLoc.size() != mergedVC.getAlternateAlleles().size() ) { throw new ReviewedStingException("Record size mismatch! 
Something went wrong in the merging of alleles."); } - final Map mergeMap = new LinkedHashMap(); + final Map mergeMap = new LinkedHashMap<>(); mergeMap.put(null, mergedVC.getReference()); // the reference event (null) --> the reference allele for(int iii = 0; iii < mergedVC.getAlternateAlleles().size(); iii++) { mergeMap.put(eventsAtThisLoc.get(iii), mergedVC.getAlternateAllele(iii)); // BUGBUG: This is assuming that the order of alleles is the same as the priority list given to simpleMerge function @@ -244,7 +244,7 @@ public class GenotypingEngine { if ( in_GGA_mode ) startPosKeySet.clear(); - cleanUpSymbolicUnassembledEvents( haplotypes ); + //cleanUpSymbolicUnassembledEvents( haplotypes ); // We don't make symbolic alleles so this isn't needed currently if ( !in_GGA_mode ) { // run the event merger if we're not in GGA mode final boolean mergedAnything = crossHaplotypeEventMerger.merge(haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc); @@ -267,7 +267,7 @@ public class GenotypingEngine { * @return the list of the sources of vcs in the same order */ private List makePriorityList(final List vcs) { - final List priorityList = new LinkedList(); + final List priorityList = new LinkedList<>(); for ( final VariantContext vc : vcs ) priorityList.add(vc.getSource()); return priorityList; } @@ -276,7 +276,7 @@ public class GenotypingEngine { final int loc, final List activeAllelesToGenotype) { // the overlapping events to merge into a common reference view - final List eventsAtThisLoc = new ArrayList(); + final List eventsAtThisLoc = new ArrayList<>(); if( activeAllelesToGenotype.isEmpty() ) { for( final Haplotype h : haplotypes ) { @@ -292,7 +292,7 @@ public class GenotypingEngine { if( compVC.getStart() == loc ) { int alleleCount = 0; for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - List alleleSet = new ArrayList(2); + List alleleSet = new ArrayList<>(2); alleleSet.add(compVC.getReference()); alleleSet.add(compAltAllele); final String 
vcSourceName = "Comp" + compCount + "Allele" + alleleCount; @@ -348,7 +348,7 @@ public class GenotypingEngine { final Map> perSampleFilteredReadList, final VariantContext call ) { - final Map returnMap = new LinkedHashMap(); + final Map returnMap = new LinkedHashMap<>(); final GenomeLoc callLoc = parser.createGenomeLoc(call); for( final Map.Entry sample : perSampleReadMap.entrySet() ) { final PerReadAlleleLikelihoodMap likelihoodMap = new PerReadAlleleLikelihoodMap(); @@ -384,7 +384,7 @@ public class GenotypingEngine { // TODO - split into input haplotypes and output haplotypes as not to share I/O arguments @Requires("haplotypes != null") protected static void cleanUpSymbolicUnassembledEvents( final List haplotypes ) { - final List haplotypesToRemove = new ArrayList(); + final List haplotypesToRemove = new ArrayList<>(); for( final Haplotype h : haplotypes ) { for( final VariantContext vc : h.getEventMap().getVariantContexts() ) { if( vc.isSymbolic() ) { @@ -407,7 +407,7 @@ public class GenotypingEngine { final Map> alleleMapper, final double downsamplingFraction ) { - final Map alleleReadMap = new LinkedHashMap(); + final Map alleleReadMap = new LinkedHashMap<>(); for( final Map.Entry haplotypeReadMapEntry : haplotypeReadMap.entrySet() ) { // for each sample final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); for( final Map.Entry> alleleMapperEntry : alleleMapper.entrySet() ) { // for each output allele @@ -430,7 +430,7 @@ public class GenotypingEngine { } protected static Map> createAlleleMapper( final Map mergeMap, final Map> eventMap ) { - final Map> alleleMapper = new LinkedHashMap>(); + final Map> alleleMapper = new LinkedHashMap<>(); for( final Map.Entry entry : mergeMap.entrySet() ) { alleleMapper.put(entry.getValue(), eventMap.get(new Event(entry.getKey()))); } @@ -441,100 +441,33 @@ public class GenotypingEngine { @Ensures({"result.size() == eventsAtThisLoc.size() + 1"}) protected static Map> createEventMapper( 
final int loc, final List eventsAtThisLoc, final List haplotypes ) { - final Map> eventMapper = new LinkedHashMap>(eventsAtThisLoc.size()+1); - VariantContext refVC = eventsAtThisLoc.get(0); // the genome loc is the only safe thing to pull out of this VC because ref/alt pairs might change reference basis - eventMapper.put(new Event(null), new ArrayList()); + final Map> eventMapper = new LinkedHashMap<>(eventsAtThisLoc.size()+1); + final Event refEvent = new Event(null); + eventMapper.put(refEvent, new ArrayList()); for( final VariantContext vc : eventsAtThisLoc ) { eventMapper.put(new Event(vc), new ArrayList()); } - final List undeterminedHaplotypes = new ArrayList(haplotypes.size()); for( final Haplotype h : haplotypes ) { - if( h.isArtificialHaplotype() && loc == h.getArtificialAllelePosition() ) { - final List alleles = new ArrayList(2); - alleles.add(h.getArtificialRefAllele()); - alleles.add(h.getArtificialAltAllele()); - final Event artificialVC = new Event( (new VariantContextBuilder()).source("artificialHaplotype") - .alleles(alleles) - .loc(refVC.getChr(), refVC.getStart(), refVC.getStart() + h.getArtificialRefAllele().length() - 1).make() ); - if( eventMapper.containsKey(artificialVC) ) { - eventMapper.get(artificialVC).add(h); - } - } else if( h.getEventMap().get(loc) == null ) { // no event at this location so let's investigate later - undeterminedHaplotypes.add(h); + if( h.getEventMap().get(loc) == null ) { + eventMapper.get(refEvent).add(h); } else { - boolean haplotypeIsDetermined = false; for( final VariantContext vcAtThisLoc : eventsAtThisLoc ) { if( h.getEventMap().get(loc).hasSameAllelesAs(vcAtThisLoc) ) { eventMapper.get(new Event(vcAtThisLoc)).add(h); - haplotypeIsDetermined = true; break; } } - - if( !haplotypeIsDetermined ) - undeterminedHaplotypes.add(h); } } - for( final Haplotype h : undeterminedHaplotypes ) { - Event matchingEvent = new Event(null); - for( final Map.Entry> eventToTest : eventMapper.entrySet() ) { - // don't test against 
the reference allele - if( eventToTest.getKey().equals(new Event(null)) ) - continue; - - // only try to disambiguate for alleles that have had haplotypes previously assigned above - if( eventToTest.getValue().isEmpty() ) - continue; - - final Haplotype artificialHaplotype = eventToTest.getValue().get(0); - if( isSubSetOf(artificialHaplotype.getEventMap(), h.getEventMap(), true) ) { - matchingEvent = eventToTest.getKey(); - break; - } - } - - eventMapper.get(matchingEvent).add(h); - } - return eventMapper; } - protected static boolean isSubSetOf(final Map subset, final Map superset, final boolean resolveSupersetToSubset) { - - for ( final Map.Entry fromSubset : subset.entrySet() ) { - final VariantContext fromSuperset = superset.get(fromSubset.getKey()); - if ( fromSuperset == null ) - return false; - - List supersetAlleles = fromSuperset.getAlternateAlleles(); - if ( resolveSupersetToSubset ) - supersetAlleles = resolveAlternateAlleles(fromSubset.getValue().getReference(), fromSuperset.getReference(), supersetAlleles); - - if ( !supersetAlleles.contains(fromSubset.getValue().getAlternateAllele(0)) ) - return false; - } - - return true; - } - - private static List resolveAlternateAlleles(final Allele targetReference, final Allele actualReference, final List currentAlleles) { - if ( targetReference.length() <= actualReference.length() ) - return currentAlleles; - - final List newAlleles = new ArrayList(currentAlleles.size()); - final byte[] extraBases = Arrays.copyOfRange(targetReference.getBases(), actualReference.length(), targetReference.length()); - for ( final Allele a : currentAlleles ) { - newAlleles.add(Allele.extend(a, extraBases)); - } - return newAlleles; - } - @Ensures({"result.size() == haplotypeAllelesForSample.size()"}) protected static List findEventAllelesInSample( final List eventAlleles, final List haplotypeAlleles, final List haplotypeAllelesForSample, final List> alleleMapper, final List haplotypes ) { if( 
haplotypeAllelesForSample.contains(Allele.NO_CALL) ) { return noCall; } - final List eventAllelesForSample = new ArrayList(); + final List eventAllelesForSample = new ArrayList<>(); for( final Allele a : haplotypeAllelesForSample ) { final Haplotype haplotype = haplotypes.get(haplotypeAlleles.indexOf(a)); for( int iii = 0; iii < alleleMapper.size(); iii++ ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 2ebfbcee9..e0a755c7b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -47,6 +47,9 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.arguments.DbsnpArgumentCollection; @@ -433,8 +436,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In private final static int MIN_READ_LENGTH = 10; private List samplesList = new ArrayList(); - private final static double LOG_ONE_HALF = -Math.log10(2.0); - private final static double LOG_ONE_THIRD = -Math.log10(3.0); private final List allelesToGenotype = new ArrayList(); private final static Allele FAKE_REF_ALLELE = Allele.create("N", true); // used in isActive function to call into UG Engine. 
Should never appear anywhere in a VCF file @@ -603,7 +604,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // if we don't have any data, just abort early return new ActivityProfileState(ref.getLocus(), 0.0); - final List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied + final List noCall = new ArrayList<>(); // used to noCall all genotypes until the exact model is applied noCall.add(Allele.NO_CALL); final Map splitContexts = AlignmentContextUtils.splitContextBySampleName(context); @@ -625,14 +626,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } } genotypeLikelihoods[AA] += p.getRepresentativeCount() * QualityUtils.qualToProbLog10(qual); - genotypeLikelihoods[AB] += p.getRepresentativeCount() * MathUtils.approximateLog10SumLog10( QualityUtils.qualToProbLog10(qual) + LOG_ONE_HALF, QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD + LOG_ONE_HALF ); - genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD; + genotypeLikelihoods[AB] += p.getRepresentativeCount() * MathUtils.approximateLog10SumLog10( QualityUtils.qualToProbLog10(qual) + MathUtils.LOG_ONE_HALF, QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG_ONE_THIRD + MathUtils.LOG_ONE_HALF ); + genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG_ONE_THIRD; } } genotypes.add( new GenotypeBuilder(sample.getKey()).alleles(noCall).PL(genotypeLikelihoods).make() ); } - final List alleles = new ArrayList(); + final List alleles = new ArrayList<>(); alleles.add( FAKE_REF_ALLELE ); alleles.add( FAKE_ALT_ALLELE ); final VariantCallContext vcOut = UG_engine_simple_genotyper.calculateGenotypes(new VariantContextBuilder("HCisActive!", context.getContig(), context.getLocation().getStart(), context.getLocation().getStop(), alleles).genotypes(genotypes).make(), GenotypeLikelihoodsCalculationModel.Model.INDEL); @@ -746,9 +747,9 
@@ public class HaplotypeCaller extends ActiveRegionWalker, In // Create the reference haplotype which is the bases from the reference that make up the active region finalizeActiveRegion(activeRegion); // merge overlapping fragments, clip adapter and low qual tails - final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); final byte[] fullReferenceWithPadding = activeRegion.getActiveRegionReference(referenceReader, REFERENCE_PADDING); final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); + final Haplotype referenceHaplotype = createReferenceHaplotype(activeRegion, paddedReferenceLoc); final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); @@ -760,6 +761,21 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } } + /** + * Helper function to create the reference haplotype out of the active region and a padded loc + * @param activeRegion the active region from which to generate the reference haplotype + * @param paddedReferenceLoc the GenomeLoc which includes padding and shows how big the reference haplotype should be + * @return a non-null haplotype + */ + private Haplotype createReferenceHaplotype(final ActiveRegion activeRegion, final GenomeLoc paddedReferenceLoc) { + final Haplotype refHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); + refHaplotype.setAlignmentStartHapwrtRef(activeRegion.getExtendedLoc().getStart() - paddedReferenceLoc.getStart()); + final Cigar c = new Cigar(); + c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); + refHaplotype.setCigar(c); + return refHaplotype; + } + /** * Trim down the active region to just enough to properly genotype the events among the haplotypes * @@ -791,7 +807,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } // trim down the haplotypes - final Set 
haplotypeSet = new HashSet(haplotypes.size()); + final Set haplotypeSet = new HashSet<>(haplotypes.size()); for ( final Haplotype h : haplotypes ) { final Haplotype trimmed = h.trim(trimmedActiveRegion.getExtendedLoc()); if ( trimmed != null ) { @@ -802,7 +818,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } // create the final list of trimmed haplotypes - final List trimmedHaplotypes = new ArrayList(haplotypeSet); + final List trimmedHaplotypes = new ArrayList<>(haplotypeSet); // sort haplotypes to take full advantage of haplotype start offset optimizations in PairHMM Collections.sort( trimmedHaplotypes, new HaplotypeBaseComparator() ); @@ -816,7 +832,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // trim down the reads and add them to the trimmed active region - final List trimmedReads = new ArrayList(originalActiveRegion.getReads().size()); + final List trimmedReads = new ArrayList<>(originalActiveRegion.getReads().size()); for( final GATKSAMRecord read : originalActiveRegion.getReads() ) { final GATKSAMRecord clippedRead = ReadClipper.hardClipToRegion( read, trimmedActiveRegion.getExtendedLoc().getStart(), trimmedActiveRegion.getExtendedLoc().getStop() ); if( trimmedActiveRegion.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 ) { @@ -937,7 +953,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } private Map> splitReadsBySample( final Collection reads ) { - final Map> returnMap = new HashMap>(); + final Map> returnMap = new HashMap<>(); for( final String sample : samplesList) { List readList = returnMap.get( sample ); if( readList == null ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index ca1877142..4a1a5993a 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -74,7 +74,6 @@ import java.util.*; public class LikelihoodCalculationEngine { private final static Logger logger = Logger.getLogger(LikelihoodCalculationEngine.class); - private static final double LOG_ONE_HALF = -Math.log10(2.0); private final byte constantGCP; private final double log10globalReadMismappingRate; private final boolean DEBUG; @@ -299,7 +298,7 @@ public class LikelihoodCalculationEngine { // Compute log10(10^x1/2 + 10^x2/2) = log10(10^x1+10^x2)-log10(2) // First term is approximated by Jacobian log with table lookup. haplotypeLikelihood += ReadUtils.getMeanRepresentativeReadCount( entry.getKey() ) * - ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + LOG_ONE_HALF ); + ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + MathUtils.LOG_ONE_HALF ); } } haplotypeLikelihoodMatrix[iii][jjj] = haplotypeLikelihood; @@ -397,11 +396,11 @@ public class LikelihoodCalculationEngine { if ( haplotypes.size() == 2 ) return haplotypes; // fast path -- we'll always want to use 2 haplotypes // all of the haplotypes that at least one sample called as one of the most likely - final Set selectedHaplotypes = new HashSet(); + final Set selectedHaplotypes = new HashSet<>(); selectedHaplotypes.add(findReferenceHaplotype(haplotypes)); // ref is always one of the selected // our annoying map from allele -> haplotype - final Map allele2Haplotype = new HashMap(); + final Map allele2Haplotype = new HashMap<>(); for ( final Haplotype h : haplotypes ) { h.setScore(h.isReference() ? 
Double.MAX_VALUE : 0.0); // set all of the scores to 0 (lowest value) for all non-ref haplotypes allele2Haplotype.put(Allele.create(h, h.isReference()), h); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index 20b005b40..3a377409c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -111,7 +111,11 @@ public abstract class LocalAssemblyEngine { * @param refHaplotype the reference haplotype * @return a non-null list of reads */ - protected abstract List assemble(List reads, Haplotype refHaplotype); + protected abstract List assemble(List reads, Haplotype refHaplotype, List activeAlleleHaplotypes); + + protected List assemble(List reads, Haplotype refHaplotype) { + return assemble(reads, refHaplotype, Collections.emptyList()); + } /** * Main entry point into the assembly engine. 
Build a set of deBruijn graphs out of the provided reference sequence and list of reads @@ -128,8 +132,11 @@ public abstract class LocalAssemblyEngine { if( fullReferenceWithPadding.length != refLoc.size() ) { throw new IllegalArgumentException("Reference bases and reference loc must be the same size."); } if( pruneFactor < 0 ) { throw new IllegalArgumentException("Pruning factor cannot be negative"); } + // create the list of artificial haplotypes that should be added to the graph for GGA mode + final List activeAlleleHaplotypes = createActiveAlleleHaplotypes(refHaplotype, activeAllelesToGenotype, activeRegion.getExtendedLoc()); + // create the graphs by calling our subclass assemble method - final List graphs = assemble(activeRegion.getReads(), refHaplotype); + final List graphs = assemble(activeRegion.getReads(), refHaplotype, activeAlleleHaplotypes); // do some QC on the graphs for ( final SeqGraph graph : graphs ) { sanityCheckGraph(graph, refHaplotype); } @@ -138,45 +145,53 @@ public abstract class LocalAssemblyEngine { if ( graphWriter != null ) { printGraphs(graphs); } // find the best paths in the graphs and return them as haplotypes - return findBestPaths( graphs, refHaplotype, fullReferenceWithPadding, refLoc, activeAllelesToGenotype, activeRegion.getExtendedLoc() ); + return findBestPaths( graphs, refHaplotype, refLoc, activeRegion.getExtendedLoc() ); } - @Requires({"refWithPadding.length > refHaplotype.getBases().length", "refLoc.containsP(activeRegionWindow)"}) - @Ensures({"result.contains(refHaplotype)"}) - protected List findBestPaths(final List graphs, final Haplotype refHaplotype, final byte[] refWithPadding, final GenomeLoc refLoc, final List activeAllelesToGenotype, final GenomeLoc activeRegionWindow) { - // add the reference haplotype separately from all the others to ensure that it is present in the list of haplotypes - final Set returnHaplotypes = new LinkedHashSet(); - refHaplotype.setAlignmentStartHapwrtRef(activeRegionWindow.getStart() - 
refLoc.getStart()); - final Cigar c = new Cigar(); - c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); - refHaplotype.setCigar(c); - returnHaplotypes.add( refHaplotype ); - + /** + * Create the list of artificial GGA-mode haplotypes by injecting each of the provided alternate alleles into the reference haplotype + * @param refHaplotype the reference haplotype + * @param activeAllelesToGenotype the list of alternate alleles in VariantContexts + * @param activeRegionWindow the window containing the reference haplotype + * @return a non-null list of haplotypes + */ + private List createActiveAlleleHaplotypes(final Haplotype refHaplotype, final List activeAllelesToGenotype, final GenomeLoc activeRegionWindow) { + final Set returnHaplotypes = new LinkedHashSet<>(); final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef(); - final int activeRegionStop = refHaplotype.getAlignmentStartHapwrtRef() + refHaplotype.getCigar().getReferenceLength(); - // for GGA mode, add the desired allele into the haplotype for( final VariantContext compVC : activeAllelesToGenotype ) { for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { final Haplotype insertedRefHaplotype = refHaplotype.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()); - addHaplotypeForGGA( insertedRefHaplotype, refWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, true ); + if( insertedRefHaplotype != null ) { // can be null if the requested allele can't be inserted into the haplotype + returnHaplotypes.add(insertedRefHaplotype); + } } } + return new ArrayList<>(returnHaplotypes); + } + + @Ensures({"result.contains(refHaplotype)"}) + protected List findBestPaths(final List graphs, final Haplotype refHaplotype, final GenomeLoc refLoc, final GenomeLoc activeRegionWindow) { + // add the reference haplotype separately from all the others to ensure that it is present in 
the list of haplotypes + final Set returnHaplotypes = new LinkedHashSet<>(); + returnHaplotypes.add( refHaplotype ); + + final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef(); + for( final SeqGraph graph : graphs ) { final SeqVertex source = graph.getReferenceSourceVertex(); final SeqVertex sink = graph.getReferenceSinkVertex(); if ( source == null || sink == null ) throw new IllegalArgumentException("Both source and sink cannot be null but got " + source + " and sink " + sink + " for graph "+ graph); - final KBestPaths pathFinder = new KBestPaths(allowCyclesInKmerGraphToGeneratePaths); + final KBestPaths pathFinder = new KBestPaths<>(allowCyclesInKmerGraphToGeneratePaths); for ( final Path path : pathFinder.getKBestPaths(graph, numBestHaplotypesPerGraph, source, sink) ) { -// logger.info("Found path " + path); Haplotype h = new Haplotype( path.getBases() ); if( !returnHaplotypes.contains(h) ) { final Cigar cigar = path.calculateCigar(refHaplotype.getBases()); if ( cigar == null ) { - // couldn't produce a meaningful alignment of haplotype to reference, fail quitely + // couldn't produce a meaningful alignment of haplotype to reference, fail quietly continue; } else if( cigar.isEmpty() ) { throw new IllegalStateException("Smith-Waterman alignment failure. 
Cigar = " + cigar + " with reference length " + cigar.getReferenceLength() + @@ -197,25 +212,6 @@ public abstract class LocalAssemblyEngine { if ( debug ) logger.info("Adding haplotype " + h.getCigar() + " from debruijn graph with kmer " + graph.getKmerSize()); - - // for GGA mode, add the desired allele into the haplotype if it isn't already present - if( !activeAllelesToGenotype.isEmpty() ) { - final Map eventMap = GenotypingEngine.generateVCsFromAlignment( h, refWithPadding, refLoc, "HCassembly" ); // BUGBUG: need to put this function in a shared place - for( final VariantContext compVC : activeAllelesToGenotype ) { // for GGA mode, add the desired allele into the haplotype if it isn't already present - final VariantContext vcOnHaplotype = eventMap.get(compVC.getStart()); - - // This if statement used to additionally have: - // "|| !vcOnHaplotype.hasSameAllelesAs(compVC)" - // but that can lead to problems downstream when e.g. you are injecting a 1bp deletion onto - // a haplotype that already contains a 1bp insertion (so practically it is reference but - // falls into the bin for the 1bp deletion because we keep track of the artificial alleles). - if( vcOnHaplotype == null ) { - for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - addHaplotypeForGGA( h.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()), refWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, false ); - } - } - } - } } } } @@ -238,7 +234,7 @@ public abstract class LocalAssemblyEngine { } } - return new ArrayList(returnHaplotypes); + return new ArrayList<>(returnHaplotypes); } /** @@ -256,71 +252,6 @@ public abstract class LocalAssemblyEngine { return false; } - /** - * Take a haplotype which was generated by injecting an allele into a string of bases and run SW against the reference to determine the variants on the haplotype. 
- * Unfortunately since this haplotype didn't come from the assembly graph you can't straightforwardly use the bubble traversal algorithm to get this information. - * This is a target for future work as we rewrite the HaplotypeCaller to be more bubble-caller based. - * @param haplotype the candidate haplotype - * @param ref the reference bases to align against - * @param haplotypeList the current list of haplotypes - * @param activeRegionStart the start of the active region in the reference byte array - * @param activeRegionStop the stop of the active region in the reference byte array - * @param FORCE_INCLUSION_FOR_GGA_MODE if true will include in the list even if it already exists - * @return true if the candidate haplotype was successfully incorporated into the haplotype list - */ - @Requires({"ref != null", "ref.length >= activeRegionStop - activeRegionStart"}) - private boolean addHaplotypeForGGA( final Haplotype haplotype, final byte[] ref, final Set haplotypeList, final int activeRegionStart, final int activeRegionStop, final boolean FORCE_INCLUSION_FOR_GGA_MODE ) { - if( haplotype == null ) { return false; } - - final SWPairwiseAlignment swConsensus = new SWPairwiseAlignment( ref, haplotype.getBases(), SWParameterSet.STANDARD_NGS ); - haplotype.setAlignmentStartHapwrtRef( swConsensus.getAlignmentStart2wrt1() ); - - if( swConsensus.getCigar().toString().contains("S") || swConsensus.getCigar().getReferenceLength() < 60 || swConsensus.getAlignmentStart2wrt1() < 0 ) { // protect against unhelpful haplotype alignments - return false; - } - - haplotype.setCigar( AlignmentUtils.leftAlignIndel(swConsensus.getCigar(), ref, haplotype.getBases(), swConsensus.getAlignmentStart2wrt1(), 0, true) ); - - final int hapStart = ReadUtils.getReadCoordinateForReferenceCoordinate(haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar(), activeRegionStart, ReadUtils.ClippingTail.LEFT_TAIL, true); - int hapStop = ReadUtils.getReadCoordinateForReferenceCoordinate( 
haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar(), activeRegionStop, ReadUtils.ClippingTail.RIGHT_TAIL, true ); - if( hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED && activeRegionStop == haplotype.getAlignmentStartHapwrtRef() + haplotype.getCigar().getReferenceLength() ) { - hapStop = activeRegionStop; // contract for getReadCoordinateForReferenceCoordinate function says that if read ends at boundary then it is outside of the clipping goal - } - byte[] newHaplotypeBases; - // extend partial haplotypes to contain the full active region sequence - if( hapStart == ReadUtils.CLIPPING_GOAL_NOT_REACHED && hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll(ArrayUtils.addAll(ArrayUtils.subarray(ref, activeRegionStart, swConsensus.getAlignmentStart2wrt1()), - haplotype.getBases()), - ArrayUtils.subarray(ref, swConsensus.getAlignmentStart2wrt1() + swConsensus.getCigar().getReferenceLength(), activeRegionStop)); - } else if( hapStart == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.subarray(ref, activeRegionStart, swConsensus.getAlignmentStart2wrt1()), ArrayUtils.subarray(haplotype.getBases(), 0, hapStop) ); - } else if( hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.subarray(haplotype.getBases(), hapStart, haplotype.getBases().length), ArrayUtils.subarray(ref, swConsensus.getAlignmentStart2wrt1() + swConsensus.getCigar().getReferenceLength(), activeRegionStop) ); - } else { - newHaplotypeBases = ArrayUtils.subarray(haplotype.getBases(), hapStart, hapStop); - } - - final Haplotype h = new Haplotype( newHaplotypeBases ); - final SWPairwiseAlignment swConsensus2 = new SWPairwiseAlignment( ref, h.getBases(), SWParameterSet.STANDARD_NGS ); - - h.setAlignmentStartHapwrtRef( swConsensus2.getAlignmentStart2wrt1() ); - if ( haplotype.isArtificialHaplotype() ) { - h.setArtificialEvent(haplotype.getArtificialEvent()); - } - if( 
swConsensus2.getCigar().toString().contains("S") || swConsensus2.getCigar().getReferenceLength() != activeRegionStop - activeRegionStart || swConsensus2.getAlignmentStart2wrt1() < 0 ) { // protect against unhelpful haplotype alignments - return false; - } - - h.setCigar( AlignmentUtils.leftAlignIndel(swConsensus2.getCigar(), ref, h.getBases(), swConsensus2.getAlignmentStart2wrt1(), 0, true) ); - - if( FORCE_INCLUSION_FOR_GGA_MODE || !haplotypeList.contains(h) ) { - haplotypeList.add(h); - return true; - } else { - return false; - } - } - protected SeqGraph cleanupSeqGraph(final SeqGraph seqGraph) { if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.1.dot"), pruneFactor); @@ -372,7 +303,6 @@ public abstract class LocalAssemblyEngine { * Perform general QC on the graph to make sure something hasn't gone wrong during assembly * @param graph the graph to check * @param refHaplotype the reference haplotype - * @param */ private void sanityCheckGraph(final BaseGraph graph, final Haplotype refHaplotype) { sanityCheckReferenceGraph(graph, refHaplotype); @@ -383,7 +313,6 @@ public abstract class LocalAssemblyEngine { * * @param graph the graph to check * @param refHaplotype the reference haplotype - * @param */ private void sanityCheckReferenceGraph(final BaseGraph graph, final Haplotype refHaplotype) { if( graph.getReferenceSourceVertex() == null ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index db0ce0880..3d4d38d8e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -62,6 +62,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { 
private final static Logger logger = Logger.getLogger(ReadThreadingAssembler.class); private final static int DEFAULT_NUM_PATHS_PER_GRAPH = 128; + private final static int GGA_MODE_ARTIFICIAL_COUNTS = 1000; /** The min and max kmer sizes to try when building the graph. */ private final List kmerSizes; @@ -88,7 +89,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { } @Override - public List assemble( final List reads, final Haplotype refHaplotype) { + public List assemble( final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { final List graphs = new LinkedList<>(); for ( final int kmerSize : kmerSizes ) { @@ -96,6 +97,12 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { // add the reference sequence to the graph rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); + int hapCount = 0; + for( final Haplotype h : activeAlleleHaplotypes ) { + final int[] counts = new int[h.length()]; + Arrays.fill(counts, GGA_MODE_ARTIFICIAL_COUNTS); + rtgraph.addSequence("activeAllele" + hapCount++, h.getBases(), counts, false); + } // Next pull kmers out of every read and throw them on the graph for( final GATKSAMRecord read : reads ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index 6e9223afb..8e879377f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -590,11 +590,9 @@ public class ReadThreadingGraph extends BaseGraph(), 10, new Haplotype(refCycle.getBytes(), true)); - final DeBruijnGraph g2 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(noCycle.getBytes(), true)); + final 
DeBruijnGraph g1 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(refCycle.getBytes(), true), Collections.emptyList()); + final DeBruijnGraph g2 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(noCycle.getBytes(), true), Collections.emptyList()); Assert.assertTrue(g1 == null, "Reference cycle graph should return null during creation."); Assert.assertTrue(g2 != null, "Reference non-cycle graph should not return null during creation."); @@ -147,7 +147,7 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { } } - assembler.addReadKmersToGraph(builder, Arrays.asList(read)); + assembler.addReadKmersToGraph(builder, Arrays.asList(read), Collections.emptyList()); Assert.assertEquals(builder.addedPairs.size(), expectedStarts.size()); for ( final Kmer addedKmer : builder.addedPairs ) { Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index 9ef9fea77..3f3b295f8 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "008029ee34e1becd8312e3c4d608033c"); + "38b4596c3910fdde51ea59aa1a8f848f"); } @Test public void 
testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "ae8d95ffe77515cc74a55c2afd142826"); + "08147870d73d9749ced8cfc7cdd4714f"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 91e80b45c..5fc0f4f52 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -96,7 +96,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "bb30d0761dc9e2dfd57bfe07b72d06d8"); + "ffd69c410dca0d2f9fe75f3cb5d08179"); } @Test diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java index a517e1cb1..74361de1b 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java @@ -47,6 +47,9 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import 
org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading.ReadThreadingAssembler; @@ -216,6 +219,10 @@ public class LocalAssemblyEngineUnitTest extends BaseTest { private List assemble(final Assembler assembler, final byte[] refBases, final GenomeLoc loc, final List reads) { final Haplotype refHaplotype = new Haplotype(refBases, true); + final Cigar c = new Cigar(); + c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); + refHaplotype.setCigar(c); + final ActiveRegion activeRegion = new ActiveRegion(loc, null, true, genomeLocParser, 0); activeRegion.addAll(reads); final LocalAssemblyEngine engine = createAssembler(assembler); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java index 8efb3d486..3f10fc72c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java @@ -85,7 +85,7 @@ public class ReadThreadingAssemblerUnitTest extends BaseTest { public SeqGraph assemble() { assembler.removePathsNotConnectedToRef = false; // need to pass some of the tests assembler.setDebugGraphTransformations(true); - final SeqGraph graph = assembler.assemble(reads, refHaplotype).get(0); + final SeqGraph graph = assembler.assemble(reads, refHaplotype, Collections.emptyList()).get(0); if ( DEBUG ) graph.printGraph(new File("test.dot"), 0); return graph; } diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index 49157a206..b158d1509 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ 
b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -55,17 +55,19 @@ public class MathUtils { private static final double JACOBIAN_LOG_TABLE_INV_STEP = 1.0 / JACOBIAN_LOG_TABLE_STEP; private static final double MAX_JACOBIAN_TOLERANCE = 8.0; private static final int JACOBIAN_LOG_TABLE_SIZE = (int) (MAX_JACOBIAN_TOLERANCE / JACOBIAN_LOG_TABLE_STEP) + 1; - private static final int MAXN = 70000; + private static final int MAXN = 70_000; private static final int LOG10_CACHE_SIZE = 4 * MAXN; // we need to be able to go up to 2*(2N) when calculating some of the coefficients /** * The smallest log10 value we'll emit from normalizeFromLog10 and other functions * where the real-space value is 0.0. */ - public final static double LOG10_P_OF_ZERO = -1000000.0; - public final static double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); - private final static double NATURAL_LOG_OF_TEN = Math.log(10.0); - private final static double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI); + public static final double LOG10_P_OF_ZERO = -1000000.0; + public static final double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); + public static final double LOG_ONE_HALF = -Math.log10(2.0); + public static final double LOG_ONE_THIRD = -Math.log10(3.0); + private static final double NATURAL_LOG_OF_TEN = Math.log(10.0); + private static final double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI); static { log10Cache = new double[LOG10_CACHE_SIZE]; diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index f253fc9c9..b309ef633 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -221,7 +221,7 @@ public class PerReadAlleleLikelihoodMap { final int count = 
ReadUtils.getMeanRepresentativeReadCount(read); final double likelihood_iii = entry.getValue().get(iii_allele); final double likelihood_jjj = entry.getValue().get(jjj_allele); - haplotypeLikelihood += count * (MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + LOG_ONE_HALF); + haplotypeLikelihood += count * (MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + MathUtils.LOG_ONE_HALF); // fast exit. If this diploid pair is already worse than the max, just stop and look at the next pair if ( haplotypeLikelihood < maxElement ) break; @@ -241,7 +241,6 @@ public class PerReadAlleleLikelihoodMap { return new MostLikelyAllele(alleles.get(hap1), alleles.get(hap2), maxElement, maxElement); } - private static final double LOG_ONE_HALF = -Math.log10(2.0); /** * Given a map from alleles to likelihoods, find the allele with the largest likelihood. diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index bacee7942..1f932b222 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -46,7 +46,6 @@ public class Haplotype extends Allele { private EventMap eventMap = null; private Cigar cigar; private int alignmentStartHapwrtRef; - private Event artificialEvent = null; private double score = 0; /** @@ -93,11 +92,6 @@ public class Haplotype extends Allele { super(allele, true); } - protected Haplotype( final byte[] bases, final Event artificialEvent ) { - this(bases, false); - this.artificialEvent = artificialEvent; - } - public Haplotype( final byte[] bases, final GenomeLoc loc ) { this(bases, false); this.genomeLocation = loc; @@ -189,7 +183,7 @@ public class Haplotype extends Allele { } /** - * Get the cigar for this haplotype. Note that cigar is guarenteed to be consolidated + * Get the cigar for this haplotype. 
Note that the cigar is guaranteed to be consolidated * in that multiple adjacent equal operates will have been merged * @return the cigar of this haplotype */ @@ -223,30 +217,6 @@ public class Haplotype extends Allele { throw new IllegalArgumentException("Read length " + length() + " not equal to the read length of the cigar " + cigar.getReadLength()); } - public boolean isArtificialHaplotype() { - return artificialEvent != null; - } - - public Event getArtificialEvent() { - return artificialEvent; - } - - public Allele getArtificialRefAllele() { - return artificialEvent.ref; - } - - public Allele getArtificialAltAllele() { - return artificialEvent.alt; - } - - public int getArtificialAllelePosition() { - return artificialEvent.pos; - } - - public void setArtificialEvent( final Event artificialEvent ) { - this.artificialEvent = artificialEvent; - } - @Requires({"refInsertLocation >= 0"}) public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) { // refInsertLocation is in ref haplotype offset coordinates NOT genomic coordinates @@ -260,7 +230,7 @@ public class Haplotype extends Allele { newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, 0, haplotypeInsertLocation)); // bases before the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, haplotypeInsertLocation + refAllele.length(), myBases.length)); // bases after the variant - return new Haplotype(newHaplotypeBases, new Event(refAllele, altAllele, genomicInsertLocation)); + return new Haplotype(newHaplotypeBases); } public static LinkedHashMap makeHaplotypeListFromAlleles(final List alleleList, From ed4f19d79b3d8039ae50c2122c88a76a6b9e5796 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Fri, 31 May 2013 11:28:29 -0400 Subject: [PATCH 24/99] 
Restore scala compilation by default in build.xml -This was accidentally clobbered in a recent commit. -If you want to compile Java-only, easiest thing to do is run "ant gatk" rather than modifying build.xml --- build.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.xml b/build.xml index d9b37f4de..2e9df4d5e 100644 --- a/build.xml +++ b/build.xml @@ -39,7 +39,7 @@ - + From 64b4d8072923612b38662aad984a836dc8093fcb Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 31 May 2013 13:16:00 -0400 Subject: [PATCH 25/99] Make BQSR calculateIsIndel robust to indel CIGARs are start/end of read -- The previous implementation attempted to be robust to this, but not all cases were handled properly. Added a helper function updateInde() that bounds up the update to be in the range of the indel array, and cleaned up logic of how the method works. The previous behavior was inconsistent across read fwd/rev stand, so that the indel cigars at the end of read were put at the start of reads if the reads were in the forward strand but not if they were in the reverse strand. 
Everything is now consistent, as can be seen in the symmetry of the unit tests: tests.add(new Object[]{"1D3M", false, EventType.BASE_DELETION, new int[]{0,0,0}}); tests.add(new Object[]{"1M1D2M", false, EventType.BASE_DELETION, new int[]{1,0,0}}); tests.add(new Object[]{"2M1D1M", false, EventType.BASE_DELETION, new int[]{0,1,0}}); tests.add(new Object[]{"3M1D", false, EventType.BASE_DELETION, new int[]{0,0,1}}); tests.add(new Object[]{"1D3M", true, EventType.BASE_DELETION, new int[]{1,0,0}}); tests.add(new Object[]{"1M1D2M", true, EventType.BASE_DELETION, new int[]{0,1,0}}); tests.add(new Object[]{"2M1D1M", true, EventType.BASE_DELETION, new int[]{0,0,1}}); tests.add(new Object[]{"3M1D", true, EventType.BASE_DELETION, new int[]{0,0,0}}); tests.add(new Object[]{"4M1I", false, EventType.BASE_INSERTION, new int[]{0,0,0,1,0}}); tests.add(new Object[]{"3M1I1M", false, EventType.BASE_INSERTION, new int[]{0,0,1,0,0}}); tests.add(new Object[]{"2M1I2M", false, EventType.BASE_INSERTION, new int[]{0,1,0,0,0}}); tests.add(new Object[]{"1M1I3M", false, EventType.BASE_INSERTION, new int[]{1,0,0,0,0}}); tests.add(new Object[]{"1I4M", false, EventType.BASE_INSERTION, new int[]{0,0,0,0,0}}); tests.add(new Object[]{"4M1I", true, EventType.BASE_INSERTION, new int[]{0,0,0,0,0}}); tests.add(new Object[]{"3M1I1M", true, EventType.BASE_INSERTION, new int[]{0,0,0,0,1}}); tests.add(new Object[]{"2M1I2M", true, EventType.BASE_INSERTION, new int[]{0,0,0,1,0}}); tests.add(new Object[]{"1M1I3M", true, EventType.BASE_INSERTION, new int[]{0,0,1,0,0}}); tests.add(new Object[]{"1I4M", true, EventType.BASE_INSERTION, new int[]{0,1,0,0,0}}); -- delivers #50445353 --- .../gatk/walkers/bqsr/BaseRecalibrator.java | 22 ++++++++++--------- .../walkers/bqsr/BQSRIntegrationTest.java | 2 +- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java index 278317da3..c60eceaa4 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java @@ -366,9 +366,7 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche } protected static int[] calculateIsIndel( final GATKSAMRecord read, final EventType mode ) { - final byte[] readBases = read.getReadBases(); - final int[] indel = new int[readBases.length]; - Arrays.fill(indel, 0); + final int[] indel = new int[read.getReadBases().length]; int readPos = 0; for ( final CigarElement ce : read.getCigar().getCigarElements() ) { final int elementLength = ce.getLength(); @@ -383,21 +381,19 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche } case D: { - final int index = ( read.getReadNegativeStrandFlag() ? readPos : ( readPos > 0 ? readPos - 1 : readPos ) ); - indel[index] = ( mode.equals(EventType.BASE_DELETION) ? 1 : 0 ); + final int index = ( read.getReadNegativeStrandFlag() ? readPos : readPos - 1 ); + updateIndel(indel, index, mode, EventType.BASE_DELETION); break; } case I: { final boolean forwardStrandRead = !read.getReadNegativeStrandFlag(); if( forwardStrandRead ) { - indel[(readPos > 0 ? readPos - 1 : readPos)] = ( mode.equals(EventType.BASE_INSERTION) ? 1 : 0 ); - } - for (int iii = 0; iii < elementLength; iii++) { - readPos++; + updateIndel(indel, readPos - 1, mode, EventType.BASE_INSERTION); } + readPos += elementLength; if( !forwardStrandRead ) { - indel[(readPos < indel.length ? readPos : readPos - 1)] = ( mode.equals(EventType.BASE_INSERTION) ? 
1 : 0 ); + updateIndel(indel, readPos, mode, EventType.BASE_INSERTION); } break; } @@ -412,6 +408,12 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche return indel; } + private static void updateIndel(final int[] indel, final int index, final EventType mode, final EventType requiredMode) { + if ( mode == requiredMode && index >= 0 && index < indel.length ) + // protect ourselves from events at the start or end of the read (1D3M or 3M1D) + indel[index] = 1; + } + protected static double[] calculateFractionalErrorArray( final int[] errorArray, final byte[] baqArray ) { if(errorArray.length != baqArray.length ) { throw new ReviewedStingException("Array length mismatch detected. Malformed read?"); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java index 907046704..71c29fe0b 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java @@ -111,7 +111,7 @@ public class BQSRIntegrationTest extends WalkerTest { {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "85a120b7d86b61597b86b9e93decbdfc")}, {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "5248dc49aec0323c74b496bb4928c73c")}, {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", "cb52f267e0010f849f50b0bf1de474a1")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "1425a5063ee757dbfc013df24e65a67a")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", 
"1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "fb372d0a8fc41b01ced1adab31546850")}, {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "c1c3cda8caceed619d3d439c3990cd26")}, {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "c9953f020a65c1603a6d71aeeb1b95f3")}, {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "5bfff0c699345cca12a9b33acf95588f")}, From 4b206a3540485a4a747df59ca127ef6d4305d4bd Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 31 May 2013 13:54:33 -0400 Subject: [PATCH 26/99] Check that -compress arguments are within range 0-9 -- Although the original bug report was about SplitSamFile it actually was an engine wide error. 
The two places in the that provide compression to the BAM write now check the validity of the compress argument via a static method in ReadUtils -- delivers #49531009 --- .../SAMFileWriterArgumentTypeDescriptor.java | 7 ++++--- .../sting/utils/sam/ReadUtils.java | 8 ++++++++ .../gatk/EngineFeaturesIntegrationTest.java | 16 ++++++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/io/stubs/SAMFileWriterArgumentTypeDescriptor.java b/public/java/src/org/broadinstitute/sting/gatk/io/stubs/SAMFileWriterArgumentTypeDescriptor.java index 458846db0..3b89787ad 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/io/stubs/SAMFileWriterArgumentTypeDescriptor.java +++ b/public/java/src/org/broadinstitute/sting/gatk/io/stubs/SAMFileWriterArgumentTypeDescriptor.java @@ -30,6 +30,7 @@ import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.sam.ReadUtils; import java.io.OutputStream; import java.lang.annotation.Annotation; @@ -132,9 +133,9 @@ public class SAMFileWriterArgumentTypeDescriptor extends ArgumentTypeDescriptor if (writerFileName != null && writerFileName.asFile() != null ) { stub = new SAMFileWriterStub(engine, writerFileName.asFile()); - if ( compressionLevel != null ) - stub.setCompressionLevel(compressionLevel); - if ( indexOnTheFly ) + if ( compressionLevel != null ) { + stub.setCompressionLevel(ReadUtils.validateCompressionLevel(compressionLevel)); + } if ( indexOnTheFly ) stub.setIndexOnTheFly(indexOnTheFly); if ( generateMD5 ) stub.setGenerateMD5(generateMD5); diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java index 5b15fdd1b..cf1c9cb8e 100644 --- 
a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java @@ -36,6 +36,7 @@ import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.NGSPlatform; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.exceptions.UserException; import java.io.File; import java.util.*; @@ -152,11 +153,18 @@ public class ReadUtils { * @return a SAMFileWriter with the compression level if it is a bam. */ public static SAMFileWriter createSAMFileWriterWithCompression(SAMFileHeader header, boolean presorted, String file, int compression) { + validateCompressionLevel(compression); if (file.endsWith(".bam")) return new SAMFileWriterFactory().makeBAMWriter(header, presorted, new File(file), compression); return new SAMFileWriterFactory().makeSAMOrBAMWriter(header, presorted, new File(file)); } + public static int validateCompressionLevel(final int requestedCompressionLevel) { + if ( requestedCompressionLevel < 0 || requestedCompressionLevel > 9 ) + throw new UserException.BadArgumentValue("compress", "Compression level must be 0-9 but got " + requestedCompressionLevel); + return requestedCompressionLevel; + } + /** * is this base inside the adaptor of the read? 
* diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index c60c6430c..6cfa90d90 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -174,4 +174,20 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { 1, Arrays.asList("ecf27a776cdfc771defab1c5d19de9ab")); executeTest("testUserReadFilterAppliedBeforeWalker", spec); } + + @Test + public void testNegativeCompress() { + testBadCompressArgument(-1); + } + + @Test + public void testTooBigCompress() { + testBadCompressArgument(100); + } + + private void testBadCompressArgument(final int compress) { + WalkerTestSpec spec = new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I private/testdata/NA12878.1_10mb_2_10mb.bam -o %s -compress " + compress, + 1, UserException.class); + executeTest("badCompress " + compress, spec); + } } \ No newline at end of file From 6555361742e64829183c9cb056795f0fc0b43443 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 31 May 2013 15:21:12 -0400 Subject: [PATCH 27/99] Fix error in merging code in HC -- Ultimately this was caused by an underlying bug in the reverting of soft clipped bases in the read clipper. The read clipper would fail to properly set the alignment start for reads that were 100% clipped before reverting, such as 10H2S5H => 10H2M5H. This has been fixed and unit tested. -- Update 1 ReduceReads MD5, which was due to cases where we were clipping away all of the MATCH part of the read, leaving a cigar like 50H11S and the revert soft clips was failing to properly revert the bases. 
-- delivers #50655421 --- .../ReduceReadsIntegrationTest.java | 2 +- .../sting/utils/clipping/ClippingOp.java | 47 +++++++++++-------- .../utils/clipping/ReadClipperUnitTest.java | 9 +++- 3 files changed, 37 insertions(+), 21 deletions(-) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java index 405e616f1..4fbbe1d0c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java @@ -260,7 +260,7 @@ public class ReduceReadsIntegrationTest extends WalkerTest { public void testDivideByZero() { String base = String.format("-T ReduceReads %s -npt -R %s -I %s", DIVIDEBYZERO_L, REF, DIVIDEBYZERO_BAM) + " -o %s "; // we expect to lose coverage due to the downsampling so don't run the systematic tests - executeTestWithoutAdditionalRRTests("testDivideByZero", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("c459a6153a17c2cbf8441e1918fda9c8"))); + executeTestWithoutAdditionalRRTests("testDivideByZero", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("4f0ef477c0417d1eb602b323474ef377"))); } /** diff --git a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java index f51881e0b..2c2cbd98f 100644 --- a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java +++ b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java @@ -35,6 +35,7 @@ import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.util.Iterator; +import java.util.List; import java.util.Stack; import java.util.Vector; @@ -559,26 
+560,34 @@ public class ClippingOp { return new CigarShift(cleanCigar, shiftFromStart, shiftFromEnd); } + /** + * Compute the offset of the first "real" position in the cigar on the genome + * + * This is defined as a first position after a run of Hs followed by a run of Ss + * + * @param cigar A non-null cigar + * @return the offset (from 0) of the first on-genome base + */ + private int calcHardSoftOffset(final Cigar cigar) { + final List elements = cigar.getCigarElements(); + + int size = 0; + int i = 0; + while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.HARD_CLIP ) { + size += elements.get(i).getLength(); + i++; + } + while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.SOFT_CLIP ) { + size += elements.get(i).getLength(); + i++; + } + + return size; + } + private int calculateAlignmentStartShift(Cigar oldCigar, Cigar newCigar) { - int newShift = 0; - int oldShift = 0; - - boolean readHasStarted = false; // if the new cigar is composed of S and H only, we have to traverse the entire old cigar to calculate the shift - for (CigarElement cigarElement : newCigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.HARD_CLIP || cigarElement.getOperator() == CigarOperator.SOFT_CLIP) - newShift += cigarElement.getLength(); - else { - readHasStarted = true; - break; - } - } - - for (CigarElement cigarElement : oldCigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.HARD_CLIP || cigarElement.getOperator() == CigarOperator.SOFT_CLIP) - oldShift += cigarElement.getLength(); - else if (readHasStarted) - break; - } + final int newShift = calcHardSoftOffset(newCigar); + final int oldShift = calcHardSoftOffset(oldCigar); return newShift - oldShift; } diff --git a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java index 6ec4336b0..0b4153535 100644 --- 
a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java @@ -48,7 +48,7 @@ import java.util.List; public class ReadClipperUnitTest extends BaseTest { List cigarList; - int maximumCigarSize = 6; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 + int maximumCigarSize = 10; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 @BeforeClass public void init() { @@ -391,4 +391,11 @@ public class ReadClipperUnitTest extends BaseTest { } } + @Test(enabled = true) + public void testRevertEntirelySoftclippedReads() { + GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("2H1S3H"); + GATKSAMRecord clippedRead = ReadClipper.revertSoftClippedBases(read); + Assert.assertEquals(clippedRead.getAlignmentStart(), read.getSoftStart()); + } + } \ No newline at end of file From ab40f4af43a28dacc4b3a87d007c4b0b08b4cc83 Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Mon, 3 Jun 2013 11:01:34 -0400 Subject: [PATCH 28/99] Break out the GGA kmers and the read kmers into separate functions for the DeBruijn assembler. -- Added unit test for new function. 
--- .../haplotypecaller/DeBruijnAssembler.java | 30 ++++++++++--- .../haplotypecaller/graphs/SeqGraph.java | 6 +-- .../readthreading/ReadThreadingAssembler.java | 2 + .../DeBruijnAssemblerUnitTest.java | 45 ++++++++++++++++++- .../broadinstitute/sting/utils/MathUtils.java | 8 +--- 5 files changed, 74 insertions(+), 17 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 3c0642f83..d876a403b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -143,8 +143,13 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { // something went wrong, so abort right now with a null graph return null; - // now go through the graph already seeded with the reference sequence and add the read kmers to it as well as the artificial GGA haplotypes - if ( ! addReadKmersToGraph(builder, reads, activeAlleleHaplotypes) ) + // add the artificial GGA haplotypes to the graph + if ( ! addGGAKmersToGraph(builder, activeAlleleHaplotypes) ) + // something went wrong, so abort right now with a null graph + return null; + + // now go through the graph already seeded with the reference sequence and add the read kmers to it + if ( ! 
addReadKmersToGraph(builder, reads) ) // some problem was detected adding the reads to the graph, return null to indicate we failed return null; @@ -153,17 +158,16 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } /** - * Add the high-quality kmers from the reads to the graph + * Add the high-quality kmers from the artificial GGA haplotypes to the graph * * @param builder a debruijn graph builder to add the read kmers to - * @param reads a non-null list of reads whose kmers we want to add to the graph * @param activeAlleleHaplotypes a list of haplotypes to add to the graph for GGA mode * @return true if we successfully added the read kmers to the graph without corrupting it in some way */ - protected boolean addReadKmersToGraph(final DeBruijnGraphBuilder builder, final List reads, final List activeAlleleHaplotypes) { + protected boolean addGGAKmersToGraph(final DeBruijnGraphBuilder builder, final List activeAlleleHaplotypes) { + final int kmerLength = builder.getKmerSize(); - // First pull kmers out of the artificial GGA haplotypes and throw them on the graph for( final Haplotype haplotype : activeAlleleHaplotypes ) { final int end = haplotype.length() - kmerLength; for( int start = 0; start < end; start++ ) { @@ -171,6 +175,20 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } } + // always returns true now, but it's possible that we'd add kmers and decide we don't like the graph in some way + return true; + } + + /** + * Add the high-quality kmers from the reads to the graph + * + * @param builder a debruijn graph builder to add the read kmers to + * @param reads a non-null list of reads whose kmers we want to add to the graph + * @return true if we successfully added the read kmers to the graph without corrupting it in some way + */ + protected boolean addReadKmersToGraph(final DeBruijnGraphBuilder builder, final List reads) { + final int kmerLength = builder.getKmerSize(); + // Next pull kmers out of every read and throw them on 
the graph for( final GATKSAMRecord read : reads ) { final byte[] sequence = read.getReadBases(); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index 20edcb39b..06c127a84 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -352,7 +352,7 @@ public final class SeqGraph extends BaseGraph { * Merge until the graph has no vertices that are candidates for merging */ public boolean transformUntilComplete() { - boolean didAtLeastOneTranform = false; + boolean didAtLeastOneTransform = false; boolean foundNodesToMerge = true; while( foundNodesToMerge ) { foundNodesToMerge = false; @@ -360,13 +360,13 @@ public final class SeqGraph extends BaseGraph { for( final SeqVertex v : vertexSet() ) { foundNodesToMerge = tryToTransform(v); if ( foundNodesToMerge ) { - didAtLeastOneTranform = true; + didAtLeastOneTransform = true; break; } } } - return didAtLeastOneTranform; + return didAtLeastOneTransform; } /** diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index 3d4d38d8e..bd24891bc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -97,6 +97,8 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { // add the reference sequence to the graph rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); + + // add the artificial GGA haplotypes to the graph int hapCount = 
0; for( final Haplotype h : activeAlleleHaplotypes ) { final int[] counts = new int[h.length()]; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java index 2ca78f306..95592241d 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java @@ -147,7 +147,50 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { } } - assembler.addReadKmersToGraph(builder, Arrays.asList(read), Collections.emptyList()); + assembler.addReadKmersToGraph(builder, Arrays.asList(read)); + Assert.assertEquals(builder.addedPairs.size(), expectedStarts.size()); + for ( final Kmer addedKmer : builder.addedPairs ) { + Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); + } + } + + @DataProvider(name = "AddGGAKmersToGraph") + public Object[][] makeAddGGAKmersToGraphData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final String bases = "ACGTAACCGGTTAAACCCGGGTTT"; + final int readLen = bases.length(); + final List allBadStarts = new ArrayList(readLen); + for ( int i = 0; i < readLen; i++ ) allBadStarts.add(i); + + for ( final int kmerSize : Arrays.asList(3, 4, 5) ) { + tests.add(new Object[]{bases, kmerSize}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AddGGAKmersToGraph", enabled = ! 
DEBUG) + public void testAddGGAKmersToGraph(final String bases, final int kmerSize) { + final int readLen = bases.length(); + final DeBruijnAssembler assembler = new DeBruijnAssembler(); + final MockBuilder builder = new MockBuilder(kmerSize); + + final Set expectedBases = new HashSet(); + final Set expectedStarts = new LinkedHashSet(); + for ( int i = 0; i < readLen; i++) { + boolean good = true; + for ( int j = 0; j < kmerSize + 1; j++ ) { // +1 is for pairing + good &= i + j < readLen; + } + if ( good ) { + expectedStarts.add(i); + expectedBases.add(bases.substring(i, i + kmerSize + 1)); + } + } + + assembler.addGGAKmersToGraph(builder, Arrays.asList(new Haplotype(bases.getBytes()))); Assert.assertEquals(builder.addedPairs.size(), expectedStarts.size()); for ( final Kmer addedKmer : builder.addedPairs ) { Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index b158d1509..dfd3537da 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -244,9 +244,6 @@ public class MathUtils { public static double sumLog10(final double[] log10values) { return Math.pow(10.0, log10sumLog10(log10values)); - // double s = 0.0; - // for ( double v : log10values) s += Math.pow(10.0, v); - // return s; } public static double log10sumLog10(final double[] log10values) { @@ -859,11 +856,8 @@ public class MathUtils { break; sum += x; i++; - //System.out.printf(" %d/%d", sum, i); } - //System.out.printf("Sum = %d, n = %d, maxI = %d, avg = %f%n", sum, i, maxI, (1.0 * sum) / i); - return (1.0 * sum) / i; } @@ -1359,7 +1353,7 @@ public class MathUtils { } /** - * Compute in a numerical correct way the quanity log10(1-x) + * Compute in a numerical correct way the 
quantity log10(1-x) * * Uses the approximation log10(1-x) = log10(1/x - 1) + log10(x) to avoid very quick underflow * in 1-x when x is very small From c9f5b53efa8307add66b4f1fc1d689a0818db443 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 3 Jun 2013 14:36:54 -0400 Subject: [PATCH 29/99] Bugfix for HC can fail to assemble the correct reference sequence in some cases -- Ultimately this was caused by overly aggressive merging of CommonSuffixMerger. In the case where you have this graph: ACT [ref source] -> C G -> ACT -> C we would merge into G -> ACT -> C which would linearlize into GACTC Causing us to add bases to the reference source node that couldn't be recovered. The solution was to ensure that CommonSuffixMerger only operates when all nodes to be merged aren't source nodes themselves. -- Added a convenient argument to the haplotype caller (captureAssemblyFailureBAM) that will write out the exact reads to a BAM file that went into a failed assembly run (going to a file called AssemblyFailure.BAM). This can be used to rerun the haplotype caller to produce the exact error, which can be hard in regions of deep coverage where the downsampler state determines the exact reads going into assembly and therefore makes running with a sub-interval not reproduce the error -- Did some misc. 
cleanup of code while debugging -- [delivers #50917729] --- .../haplotypecaller/HaplotypeCaller.java | 30 ++++++--- .../haplotypecaller/LocalAssemblyEngine.java | 38 +++++++----- .../haplotypecaller/graphs/BaseGraph.java | 61 +++++++++++++++++++ .../haplotypecaller/graphs/SeqGraph.java | 17 ++++-- .../graphs/SharedSequenceMerger.java | 8 ++- .../readthreading/ReadThreadingAssembler.java | 6 +- .../graphs/CommonSuffixMergerUnitTest.java | 16 +++++ 7 files changed, 147 insertions(+), 29 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index e0a755c7b..73367f8c3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -50,6 +50,7 @@ import com.google.java.contract.Ensures; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; +import net.sf.samtools.SAMFileWriter; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.arguments.DbsnpArgumentCollection; @@ -387,6 +388,10 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="dontUseSoftClippedBases", shortName="dontUseSoftClippedBases", doc="If specified, we will not analyze soft clipped bases in the reads", required = false) protected boolean dontUseSoftClippedBases = false; + @Hidden + @Argument(fullName="captureAssemblyFailureBAM", shortName="captureAssemblyFailureBAM", doc="If specified, we will write a BAM called assemblyFailure.bam capturing all of the reads that were in the active region when the assembler failed for any reason", required = false) + protected boolean captureAssemblyFailureBAM = false; + @Hidden 
@Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequenece rather than just the shortest paths", required = false) protected boolean allowCyclesInKmerGraphToGeneratePaths = false; @@ -751,13 +756,24 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); final Haplotype referenceHaplotype = createReferenceHaplotype(activeRegion, paddedReferenceLoc); - final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); - - if ( ! dontTrimActiveRegions ) { - return trimActiveRegion(activeRegion, haplotypes, activeAllelesToGenotype, fullReferenceWithPadding, paddedReferenceLoc); - } else { - // we don't want to trim active regions, so go ahead and use the old one - return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc, true); + try { + final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); + if ( ! 
dontTrimActiveRegions ) { + return trimActiveRegion(activeRegion, haplotypes, activeAllelesToGenotype, fullReferenceWithPadding, paddedReferenceLoc); + } else { + // we don't want to trim active regions, so go ahead and use the old one + return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc, true); + } + } catch ( Exception e ) { + // Capture any exception that might be thrown, and write out the assembly failure BAM if requested + if ( captureAssemblyFailureBAM ) { + final SAMFileWriter writer = ReadUtils.createSAMFileWriterWithCompression(getToolkit().getSAMFileHeader(), true, "assemblyFailure.bam", 5); + for ( final GATKSAMRecord read : activeRegion.getReads() ) { + writer.addAlignment(read); + } + writer.close(); + } + throw e; } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index 3a377409c..1a5f34bc3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -78,6 +78,10 @@ import java.util.*; public abstract class LocalAssemblyEngine { private final static Logger logger = Logger.getLogger(LocalAssemblyEngine.class); + /** + * If false, we will only write out a region around the reference source + */ + private final static boolean PRINT_FULL_GRAPH_FOR_DEBUGGING = true; public static final byte DEFAULT_MIN_BASE_QUALITY_TO_USE = (byte) 8; private static final int MIN_HAPLOTYPE_REFERENCE_LENGTH = 30; @@ -252,20 +256,26 @@ public abstract class LocalAssemblyEngine { return false; } - protected SeqGraph cleanupSeqGraph(final SeqGraph seqGraph) { - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.1.dot"), pruneFactor); + /** + * Print graph to file if debugGraphTransformations 
is enabled + * @param graph the graph to print + * @param file the destination file + */ + protected void printDebugGraphTransform(final BaseGraph graph, final File file) { + if ( debugGraphTransformations ) { + if ( PRINT_FULL_GRAPH_FOR_DEBUGGING ) + graph.printGraph(file, pruneFactor); + else + graph.subsetToRefSource().printGraph(file, pruneFactor); + } + } + + protected SeqGraph cleanupSeqGraph(final SeqGraph seqGraph) { + printDebugGraphTransform(seqGraph, new File("sequenceGraph.1.dot")); - // TODO -- we need to come up with a consistent pruning algorithm. The current pruning algorithm - // TODO -- works well but it doesn't differentiate between an isolated chain that doesn't connect - // TODO -- to anything from one that's actually has good support along the chain but just happens - // TODO -- to have a connection in the middle that has weight of < pruneFactor. Ultimately - // TODO -- the pruning algorithm really should be an error correction algorithm that knows more - // TODO -- about the structure of the data and can differentiate between an infrequent path but - // TODO -- without evidence against it (such as occurs when a region is hard to get any reads through) - // TODO -- from a error with lots of weight going along another similar path // the very first thing we need to do is zip up the graph, or pruneGraph will be too aggressive seqGraph.zipLinearChains(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.2.zipped.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.2.zipped.dot")); // now go through and prune the graph, removing vertices no longer connected to the reference chain // IMPORTANT: pruning must occur before we call simplifyGraph, as simplifyGraph adds 0 weight @@ -273,9 +283,9 @@ public abstract class LocalAssemblyEngine { seqGraph.pruneGraph(pruneFactor); seqGraph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection(); - if ( debugGraphTransformations ) 
seqGraph.printGraph(new File("sequenceGraph.3.pruned.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.3.pruned.dot")); seqGraph.simplifyGraph(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.4.merged.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.4.merged.dot")); // The graph has degenerated in some way, so the reference source and/or sink cannot be id'd. Can // happen in cases where for example the reference somehow manages to acquire a cycle, or @@ -294,7 +304,7 @@ public abstract class LocalAssemblyEngine { seqGraph.addVertex(dummy); seqGraph.addEdge(complete, dummy, new BaseEdge(true, 0)); } - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.5.final.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.5.final.dot")); return seqGraph; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java index 8938af7c2..c963fb6e5 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java @@ -388,6 +388,17 @@ public class BaseGraph extends Default return s; } + /** + * Get the set of vertices connected to v by incoming or outgoing edges + * @param v a non-null vertex + * @return a set of vertices {X} connected X -> v or v -> Y + */ + public Set neighboringVerticesOf(final V v) { + final Set s = incomingVerticesOf(v); + s.addAll(outgoingVerticesOf(v)); + return s; + } + /** * Print out the graph in the dot language for visualization * @param destination File to write to @@ -664,4 +675,54 @@ public class BaseGraph extends Default "kmerSize=" + kmerSize + '}'; } + + /** + * Get the set of vertices within distance edges of source, 
regardless of edge direction + * + * @param source the source vertex to consider + * @param distance the distance + * @return a set of vertices within distance of source + */ + protected Set verticesWithinDistance(final V source, final int distance) { + if ( distance == 0 ) + return Collections.singleton(source); + + final Set found = new HashSet<>(); + found.add(source); + for ( final V v : neighboringVerticesOf(source) ) { + found.addAll(verticesWithinDistance(v, distance - 1)); + } + + return found; + } + + /** + * Get a graph containing only the vertices within distance edges of target + * @param target a vertex in graph + * @param distance the max distance + * @return a non-null graph + */ + public BaseGraph subsetToNeighbors(final V target, final int distance) { + if ( target == null ) throw new IllegalArgumentException("Target cannot be null"); + if ( ! containsVertex(target) ) throw new IllegalArgumentException("Graph doesn't contain vertex " + target); + if ( distance < 0 ) throw new IllegalArgumentException("Distance must be >= 0 but got " + distance); + + + final Set toKeep = verticesWithinDistance(target, distance); + final Set toRemove = new HashSet<>(vertexSet()); + toRemove.removeAll(toKeep); + + final BaseGraph result = (BaseGraph)clone(); + result.removeAllVertices(toRemove); + + return result; + } + + /** + * Get a subgraph of graph that contains only vertices within 10 edges of the ref source vertex + * @return a non-null subgraph of this graph + */ + public BaseGraph subsetToRefSource() { + return subsetToNeighbors(getReferenceSourceVertex(), 10); + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index 06c127a84..36c515073 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -155,20 +155,29 @@ public final class SeqGraph extends BaseGraph { //logger.info("simplifyGraph iteration " + i); // iterate until we haven't don't anything useful boolean didSomeWork = false; - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".1.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".1.dot")); didSomeWork |= new MergeDiamonds().transformUntilComplete(); didSomeWork |= new MergeTails().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".2.diamonds_and_tails.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".2.diamonds_and_tails.dot")); didSomeWork |= new SplitCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".3.split_suffix.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".3.split_suffix.dot")); didSomeWork |= new MergeCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".4.merge_suffix.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".4.merge_suffix.dot")); didSomeWork |= zipLinearChains(); return didSomeWork; } + /** + * Print simplication step of this graph, if PRINT_SIMPLIFY_GRAPHS is enabled + * @param file the destination for the graph DOT file + */ + private void printGraphSimplification(final File file) { + if ( PRINT_SIMPLIFY_GRAPHS ) + subsetToNeighbors(getReferenceSourceVertex(), 5).printGraph(file, 0); + } + /** * Zip up all of the simple linear chains present in this graph. 
* diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java index 0babd8d56..5d725b1dd 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java @@ -81,7 +81,7 @@ public class SharedSequenceMerger { else { // graph.printGraph(new File("csm." + counter + "." + v.getSequenceString() + "_pre.dot"), 0); - final List edgesToRemove = new LinkedList(); + final List edgesToRemove = new LinkedList<>(); final byte[] prevSeq = prevs.iterator().next().getSequence(); final SeqVertex newV = new SeqVertex(ArrayUtils.addAll(prevSeq, v.getSequence())); graph.addVertex(newV); @@ -124,11 +124,17 @@ public class SharedSequenceMerger { final SeqVertex first = incomingVertices.iterator().next(); for ( final SeqVertex prev : incomingVertices) { if ( ! 
prev.seqEquals(first) ) + // cannot merge if our sequence isn't the same as the first sequence return false; final Collection prevOuts = graph.outgoingVerticesOf(prev); if ( prevOuts.size() != 1 ) + // prev -> v must be the only edge from prev return false; if ( prevOuts.iterator().next() != v ) + // don't allow cyles + return false; + if ( graph.inDegreeOf(prev) == 0 ) + // cannot merge when any of the incoming nodes are sources return false; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index bd24891bc..123b36640 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -113,7 +113,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { // actually build the read threading graph rtgraph.buildGraphIfNecessary(); - if ( debugGraphTransformations ) rtgraph.printGraph(new File("sequenceGraph.0.0.raw_readthreading_graph.dot"), pruneFactor); + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.0.raw_readthreading_graph.dot")); // go through and prune all of the chains where all edges have <= pruneFactor. 
This must occur // before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering @@ -128,7 +128,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { // remove all heading and trailing paths if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef(); - if ( debugGraphTransformations ) rtgraph.printGraph(new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot"), pruneFactor); + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot")); final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph(); @@ -136,7 +136,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { if ( justReturnRawGraph ) return Collections.singletonList(initialSeqGraph); if ( debug ) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler"); - if ( debugGraphTransformations ) initialSeqGraph.printGraph(new File("sequenceGraph.0.2.initial_seqgraph.dot"), pruneFactor); + printDebugGraphTransform(initialSeqGraph, new File("sequenceGraph.0.2.initial_seqgraph.dot")); initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't this is possible by construction final SeqGraph seqGraph = cleanupSeqGraph(initialSeqGraph); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java index cfed2f0b8..e1398e119 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java @@ -166,4 +166,20 @@ public class CommonSuffixMergerUnitTest extends BaseTest { splitter.merge(data.graph, data.v); assertSameHaplotypes(String.format("suffixMerge.%s.%d", data.commonSuffix, data.graph.vertexSet().size()), data.graph, 
original); } + + @Test + public void testDoesntMergeSourceNodes() { + final SeqGraph g = new SeqGraph(); + final SeqVertex v1 = new SeqVertex("A"); + final SeqVertex v2 = new SeqVertex("A"); + final SeqVertex v3 = new SeqVertex("A"); + final SeqVertex top = new SeqVertex("T"); + final SeqVertex b = new SeqVertex("C"); + g.addVertices(top, v1, v2, v3, top, b); + g.addEdges(top, v1, b); + g.addEdges(v2, b); // v2 doesn't have previous node, cannot be merged + g.addEdges(top, v3, b); + final SharedSequenceMerger merger = new SharedSequenceMerger(); + Assert.assertFalse(merger.merge(g, b), "Shouldn't be able to merge shared vertices, when one is a source"); + } } From e19c24f3ee0bb8fc3146947f1f4ff59de6a9145f Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Tue, 4 Jun 2013 09:35:12 -0400 Subject: [PATCH 30/99] Bugfix for HaplotypeCaller error: Only one of refStart or refStop must be < 0, not both -- This occurred because we were reverting reads with soft clips that would produce reads with negative (or 0) alignment starts. From such reads we could end up with adaptor starts that were negative and that would ultimately produce the "Only one of refStart or refStop must be < 0, not both" error in the FragmentUtils merging code (which would revert and adaptor clip reads). -- We now hard clip away bases soft clipped reverted bases that fall before the 1-based contig start in revertSoftClippedBases. 
-- Replace buggy cigarFromString with proper SAM-JDK call TextCigarCodec.getSingleton().decode(cigarString) -- Added unit tests for reverting soft clipped bases that create a read before the contig -- [delivers #50892431] --- .../sting/utils/clipping/ClippingOp.java | 34 ++++++-- .../utils/clipping/ReadClipperTestUtils.java | 83 +------------------ .../utils/clipping/ReadClipperUnitTest.java | 59 ++++++++++--- .../fragments/FragmentUtilsUnitTest.java | 48 +++++++++++ 4 files changed, 126 insertions(+), 98 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java index 2c2cbd98f..836c16a7e 100644 --- a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java +++ b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java @@ -194,9 +194,17 @@ public class ClippingOp { unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH)); unclipped.setCigar(unclippedCigar); - unclipped.setAlignmentStart(read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar)); + final int newStart = read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar); + unclipped.setAlignmentStart(newStart); - return unclipped; + if ( newStart <= 0 ) { + // if the start of the unclipped read occurs before the contig, + // we must hard clip away the bases since we cannot represent reads with + // negative or 0 alignment start values in the SAMRecord (e.g., 0 means unaligned) + return hardClip(unclipped, 0, - newStart); + } else { + return unclipped; + } } /** @@ -335,7 +343,24 @@ public class ClippingOp { return newCigar; } - @Requires({"start <= stop", "start == 0 || stop == read.getReadLength() - 1"}) + /** + * Hard clip bases from read, from start to stop in base coordinates + * + * If start == 0, then we will clip from the front of the read, otherwise we clip + * from the 
right. If start == 0 and stop == 10, this would clip out the first + * 10 bases of the read. + * + * Note that this function works with reads with negative alignment starts, in order to + * allow us to hardClip reads that have had their soft clips reverted and so might have + * negative alignment starts + * + * Works properly with reduced reads and insertion/deletion base qualities + * + * @param read a non-null read + * @param start a start >= 0 and < read.length + * @param stop a stop >= 0 and < read.length. + * @return a cloned version of read that has been properly trimmed down + */ private GATKSAMRecord hardClip(GATKSAMRecord read, int start, int stop) { final int firstBaseAfterSoftClips = read.getAlignmentStart() - read.getSoftStart(); final int lastBaseBeforeSoftClips = read.getSoftEnd() - read.getSoftStart(); @@ -343,7 +368,6 @@ public class ClippingOp { if (start == firstBaseAfterSoftClips && stop == lastBaseBeforeSoftClips) // note that if the read has no soft clips, these constants will be 0 and read length - 1 (beauty of math). return GATKSAMRecord.emptyRead(read); - // If the read is unmapped there is no Cigar string and neither should we create a new cigar string CigarShift cigarShift = (read.getReadUnmappedFlag()) ? 
new CigarShift(new Cigar(), 0, 0) : hardClipCigar(read.getCigar(), start, stop); @@ -357,7 +381,7 @@ public class ClippingOp { System.arraycopy(read.getReadBases(), copyStart, newBases, 0, newLength); System.arraycopy(read.getBaseQualities(), copyStart, newQuals, 0, newLength); - GATKSAMRecord hardClippedRead; + final GATKSAMRecord hardClippedRead; try { hardClippedRead = (GATKSAMRecord) read.clone(); } catch (CloneNotSupportedException e) { diff --git a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java index 0e0f6322e..cbbc8252b 100644 --- a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java @@ -28,8 +28,8 @@ package org.broadinstitute.sting.utils.clipping; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; +import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; @@ -38,13 +38,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Stack; -/** - * Created by IntelliJ IDEA. - * User: roger - * Date: 11/27/11 - * Time: 6:45 AM - * To change this template use File | Settings | File Templates. 
- */ public class ReadClipperTestUtils { //Should contain all the utils needed for tests to mass produce //reads, cigars, and other needed classes @@ -236,78 +229,6 @@ public class ReadClipperTestUtils { } public static Cigar cigarFromString(String cigarString) { - Cigar cigar = new Cigar(); - - boolean isNumber = false; - int number = 0; - for (int i = 0; i < cigarString.length(); i++) { - char x = cigarString.charAt(i); - - if (x >= '0' && x <='9') { - if (isNumber) { - number *= 10; - } - else { - isNumber = true; - } - number += x - '0'; - } - - else { - CigarElement e; - switch (x) { - case 'M': - case 'm': - e = new CigarElement(number, CigarOperator.M); - break; - - case 'I': - case 'i': - e = new CigarElement(number, CigarOperator.I); - break; - - case 'D': - case 'd': - e = new CigarElement(number, CigarOperator.D); - break; - - case 'S': - case 's': - e = new CigarElement(number, CigarOperator.S); - break; - - case 'N': - case 'n': - e = new CigarElement(number, CigarOperator.N); - break; - - case 'H': - case 'h': - e = new CigarElement(number, CigarOperator.H); - break; - - case 'P': - case 'p': - e = new CigarElement(number, CigarOperator.P); - break; - - case '=': - e = new CigarElement(number, CigarOperator.EQ); - break; - - case 'X': - case 'x': - e = new CigarElement(number, CigarOperator.X); - break; - - default: - throw new ReviewedStingException("Unrecognized cigar operator: " + x + " (number: " + number + ")"); - } - cigar.add(e); - } - } - return cigar; + return TextCigarCodec.getSingleton().decode(cigarString); } - - } diff --git a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java index 0b4153535..d6bd0d4d2 100644 --- a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java @@ -46,6 +46,7 @@ import java.util.List; * 
Date: 9/28/11 */ public class ReadClipperUnitTest extends BaseTest { + private final static boolean DEBUG = false; List cigarList; int maximumCigarSize = 10; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 @@ -55,7 +56,7 @@ public class ReadClipperUnitTest extends BaseTest { cigarList = ReadClipperTestUtils.generateCigarList(maximumCigarSize); } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipBothEndsByReferenceCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -71,7 +72,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReadCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -101,7 +102,7 @@ public class ReadClipperUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ClippedReadLengthData", enabled = true) + @Test(dataProvider = "ClippedReadLengthData", enabled = !DEBUG) public void testHardClipReadLengthIsRight(final int originalReadLength, final int nToClip) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(originalReadLength + "M"); read.getReadLength(); // provoke the caching of the read length @@ -112,7 +113,7 @@ public class ReadClipperUnitTest extends BaseTest { clipped.getReadLength(), clipped.getCigar(), expectedReadLength, nToClip, read.getReadLength(), read.getCigar())); } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -135,7 +136,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinatesLeftTail() { for (Cigar cigar : 
cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -154,7 +155,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinatesRightTail() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -172,7 +173,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipLowQualEnds() { final byte LOW_QUAL = 2; final byte HIGH_QUAL = 30; @@ -216,7 +217,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipSoftClippedBases() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -251,7 +252,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertSoftClippedBases() { for (Cigar cigar : cigarList) { final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP); @@ -273,7 +274,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertSoftClippedBasesWithThreshold() { for (Cigar cigar : cigarList) { final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP); @@ -292,6 +293,40 @@ public class ReadClipperUnitTest extends BaseTest { } } + @DataProvider(name = "RevertSoftClipsBeforeContig") + public Object[][] makeRevertSoftClipsBeforeContig() { + List tests = new ArrayList<>(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + for ( int softStart : Arrays.asList(-10, -1, 0) ) { + for ( int alignmentStart : Arrays.asList(1, 10) ) { + tests.add(new Object[]{softStart, alignmentStart}); + } + } + + return tests.toArray(new Object[][]{}); + } + + 
@Test(enabled = true, dataProvider = "RevertSoftClipsBeforeContig") + public void testRevertSoftClippedBasesBeforeStartOfContig(final int softStart, final int alignmentStart) { + final int nMatches = 10; + final int nSoft = -1 * (softStart - alignmentStart); + final String cigar = nSoft + "S" + nMatches + "M"; + final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); + read.setAlignmentStart(alignmentStart); + + Assert.assertEquals(read.getSoftStart(), softStart); + Assert.assertEquals(read.getAlignmentStart(), alignmentStart); + Assert.assertEquals(read.getCigarString(), cigar); + + final GATKSAMRecord reverted = ReadClipper.revertSoftClippedBases(read); + + final int expectedAlignmentStart = 1; + final String expectedCigar = (1 - softStart) + "H" + read.getAlignmentEnd() + "M"; + Assert.assertEquals(reverted.getSoftStart(), expectedAlignmentStart); + Assert.assertEquals(reverted.getAlignmentStart(), expectedAlignmentStart); + Assert.assertEquals(reverted.getCigarString(), expectedCigar); + } private void assertNoLowQualBases(GATKSAMRecord read, byte low_qual) { if (!read.isEmpty()) { @@ -375,7 +410,7 @@ public class ReadClipperUnitTest extends BaseTest { } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipReducedRead() { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("10M"); final int[] counts = new int[read.getReadLength()]; @@ -391,7 +426,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertEntirelySoftclippedReads() { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("2H1S3H"); GATKSAMRecord clippedRead = ReadClipper.revertSoftClippedBases(read); diff --git a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java index e9600480a..0886427ca 100644 --- 
a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java @@ -26,6 +26,7 @@ package org.broadinstitute.sting.utils.fragments; import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.pileup.PileupElement; @@ -296,4 +297,51 @@ public class FragmentUtilsUnitTest extends BaseTest { final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2); Assert.assertNull(actual); } + + @DataProvider(name = "MergeFragmentsOffContig") + public Object[][] makeMergeFragmentsOffContig() throws Exception { + List tests = new ArrayList<>(); + + for ( final int pre1 : Arrays.asList(0, 50)) { + for ( final int post1 : Arrays.asList(0, 50)) { + for ( final int pre2 : Arrays.asList(0, 50)) { + for ( final int post2 : Arrays.asList(0, 50)) { + tests.add(new Object[]{pre1, post1, pre2, post2}); + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "MergeFragmentsOffContig") + public void testMergeFragmentsOffContig(final int pre1, final int post1, final int pre2, final int post2) { + final int contigSize = 10; + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 0, contigSize); + + final GATKSAMRecord read1 = createReadOffContig(header, false, pre1, post1); + final GATKSAMRecord read2 = createReadOffContig(header, true, pre2, post2); + + final GATKSAMRecord merged = FragmentUtils.mergeOverlappingPairedFragments(read1, read2); + } + + private GATKSAMRecord createReadOffContig(final SAMFileHeader header, final boolean negStrand, final int pre, final int post) { + final int contigLen = header.getSequence(0).getSequenceLength(); + final int readLen = pre + contigLen + post; + final GATKSAMRecord read = 
ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, readLen); + read.setAlignmentStart(1); + read.setCigar(TextCigarCodec.getSingleton().decode(pre + "S" + contigLen + "M" + post + "S")); + read.setBaseQualities(Utils.dupBytes((byte) 30, readLen)); + read.setReadBases(Utils.dupBytes((byte)'A', readLen)); + read.setMappingQuality(60); + read.setMateAlignmentStart(1); + read.setProperPairFlag(true); + read.setReadPairedFlag(true); + read.setInferredInsertSize(30); + read.setReadNegativeStrandFlag(negStrand); + read.setMateNegativeStrandFlag(! negStrand); + read.setReadGroup(new GATKSAMReadGroupRecord("foo")); + return read; + } } From 34bdf20132baebb1567a09605d3928b9841130be Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 5 Jun 2013 16:37:31 -0400 Subject: [PATCH 34/99] Bugfix for bad AD values in UG/HC -- In the case where we have multiple potential alternative alleles *and* we weren't calling all of them (so that n potential values < n called) we could end up trimming the alleles down which would result in the mismatch between the PerReadAlleleLikelihoodMap alleles and the VariantContext trimmed alleles. -- Fixed by doing two things (1) moving the trimming code after the annotation call and (2) updating AD annotation to check that the alleles in the VariantContext and the PerReadAlleleLikelihoodMap are concordant, which will stop us from degenerating in the future. 
-- delivers [#50897077] --- .../annotator/DepthPerAlleleBySample.java | 29 ++++++++++--------- .../genotyper/UnifiedGenotyperEngine.java | 10 +++---- .../haplotypecaller/GenotypingEngine.java | 7 ++--- ...perGeneralPloidySuite1IntegrationTest.java | 2 +- ...perGeneralPloidySuite2IntegrationTest.java | 2 +- ...GenotyperNormalCallingIntegrationTest.java | 4 +-- ...dGenotyperReducedReadsIntegrationTest.java | 2 +- .../HaplotypeCallerIntegrationTest.java | 2 +- .../genotyper/PerReadAlleleLikelihoodMap.java | 8 +++++ 9 files changed, 37 insertions(+), 29 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java index 1cf91f181..b22ea7931 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java @@ -66,10 +66,7 @@ import org.broadinstitute.variant.variantcontext.Genotype; import org.broadinstitute.variant.variantcontext.GenotypeBuilder; import org.broadinstitute.variant.variantcontext.VariantContext; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** @@ -135,20 +132,24 @@ public class DepthPerAlleleBySample extends GenotypeAnnotation implements Standa } private void annotateWithLikelihoods(final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap, final VariantContext vc, final GenotypeBuilder gb) { - final HashMap alleleCounts = new HashMap(); + final Set alleles = new HashSet<>(vc.getAlleles()); + + // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext + if ( ! 
perReadAlleleLikelihoodMap.getAllelesSet().containsAll(alleles) ) + throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + perReadAlleleLikelihoodMap.getAllelesSet()); + + final HashMap alleleCounts = new HashMap<>(); + for ( final Allele allele : vc.getAlleles() ) { alleleCounts.put(allele, 0); } - for ( final Allele allele : vc.getAlleles() ) { - alleleCounts.put(allele, 0); - } for (Map.Entry> el : perReadAlleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles); + if (! a.isInformative() ) continue; // read is non-informative final GATKSAMRecord read = el.getKey(); - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative() ) - continue; // read is non-informative - if (!vc.getAlleles().contains(a.getMostLikelyAllele())) - continue; // sanity check - shouldn't be needed - alleleCounts.put(a.getMostLikelyAllele(), alleleCounts.get(a.getMostLikelyAllele()) + (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1)); + final int prevCount = alleleCounts.get(a.getMostLikelyAllele()); + final int incCount = read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + alleleCounts.put(a.getMostLikelyAllele(), prevCount + incCount); } + final int[] counts = new int[alleleCounts.size()]; counts[0] = alleleCounts.get(vc.getReference()); for (int i = 0; i < vc.getAlternateAlleles().size(); i++) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java index fc11706e5..3d9f75d45 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java @@ -543,11 +543,6 @@ public class UnifiedGenotyperEngine { builder.attributes(attributes); VariantContext vcCall = builder.make(); - // if we are subsetting alleles (either because there were too many or because some were not polymorphic) - // then we may need to trim the alleles (because the original VariantContext may have had to pad at the end). 
- if ( myAlleles.size() != vc.getAlleles().size() && !limitedContext ) // limitedContext callers need to handle allele trimming on their own to keep their perReadAlleleLikelihoodMap alleles in sync - vcCall = GATKVariantContextUtils.reverseTrimAlleles(vcCall); - if ( annotationEngine != null && !limitedContext ) { // limitedContext callers need to handle annotations on their own by calling their own annotationEngine // Note: we want to use the *unfiltered* and *unBAQed* context for the annotations final ReadBackedPileup pileup = rawContext.getBasePileup(); @@ -556,6 +551,11 @@ public class UnifiedGenotyperEngine { vcCall = annotationEngine.annotateContext(tracker, refContext, stratifiedContexts, vcCall, perReadAlleleLikelihoodMap); } + // if we are subsetting alleles (either because there were too many or because some were not polymorphic) + // then we may need to trim the alleles (because the original VariantContext may have had to pad at the end). + if ( myAlleles.size() != vc.getAlleles().size() && !limitedContext ) // limitedContext callers need to handle allele trimming on their own to keep their perReadAlleleLikelihoodMap alleles in sync + vcCall = GATKVariantContextUtils.reverseTrimAlleles(vcCall); + return new VariantCallContext(vcCall, confidentlyCalled(phredScaledConfidence, PoFGT0)); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 9bb456230..cbcba28fd 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -204,13 +204,12 @@ public class GenotypingEngine { convertHaplotypeReadMapToAlleleReadMap( haplotypeReadMap, alleleMapper, 0.0 ) ); final Map stratifiedReadMap = filterToOnlyOverlappingReads( genomeLocParser, alleleReadMap_annotations, 
perSampleFilteredReadList, call ); - VariantContext annotatedCall = call; - if( annotatedCall.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! + VariantContext annotatedCall = annotationEngine.annotateContext(stratifiedReadMap, call); + + if( call.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! annotatedCall = GATKVariantContextUtils.reverseTrimAlleles(annotatedCall); } - annotatedCall = annotationEngine.annotateContext(stratifiedReadMap, annotatedCall); - // maintain the set of all called haplotypes for ( final Allele calledAllele : call.getAlleles() ) calledHaplotypes.addAll(alleleMapper.get(calledAllele)); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java index 1cfc41a27..c791d08ae 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java @@ -79,6 +79,6 @@ public class UnifiedGenotyperGeneralPloidySuite1IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy1_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "66a5a3eb657fac5c621bc0c228ea9caf"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "353c97bfb05a939b3838dc8eee50326b"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java index 64568d714..1022b6e15 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java @@ -58,7 +58,7 @@ public class UnifiedGenotyperGeneralPloidySuite2IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy3_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","5eabc12fc7b4f9749e6d1be0f5b45d14"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","7e4e1397d5cff68aeba3595e671574fc"); } @Test(enabled = true) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index 907af0f34..a52176a08 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -96,7 +96,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultipleSNPAlleles() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1, - Arrays.asList("1ab95513a3abb5b760578831c61ef94b")); + Arrays.asList("f576d86656cc37c0a869c7ac911f4c7c")); executeTest("test Multiple SNP 
alleles", spec); } @@ -112,7 +112,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testReverseTrim() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam -o %s -L 20:10289124 -L 20:10090289", 1, - Arrays.asList("314b99eb146de1fdafed872ecbe1cfc2")); + Arrays.asList("94d7a907fdca7e8c9fd6bb8a87b2bab2")); executeTest("test reverse trim", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java index 5f9667cca..b9830de8e 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java @@ -74,7 +74,7 @@ public class UnifiedGenotyperReducedReadsIntegrationTest extends WalkerTest { @Test public void testReducedBamINDELs() { - testReducedCalling("INDEL", "19bc6a74250ec19efc4e1b4ee6515ac0"); + testReducedCalling("INDEL", "22110b001e2d3dd45d7872334086b2b9"); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 5fc0f4f52..d0c7228ae 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -96,7 +96,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void 
testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "ffd69c410dca0d2f9fe75f3cb5d08179"); + "627b5a12f2f02a874fb39982171a3982"); } @Test diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index b309ef633..8067d67bc 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -366,4 +366,12 @@ public class PerReadAlleleLikelihoodMap { return true; } + + /** + * Get an unmodifiable set of the unique alleles in this PerReadAlleleLikelihoodMap + * @return a non-null unmodifiable map + */ + public Set getAllelesSet() { + return Collections.unmodifiableSet(allelesSet); + } } From 209dd64268b208ece6020045b720cb677d949487 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 5 Jun 2013 17:43:31 -0400 Subject: [PATCH 35/99] HaplotypeCaller now emits per-sample DP -- Created a new annotation DepthPerSampleHC that is by default on in the HaplotypeCaller -- The depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot differentiate between reads that align over the event but aren't informative vs. those that aren't even close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). 
-- Update MD5s -- delivers [#48240601] --- .../walkers/annotator/DepthPerSampleHC.java | 126 ++++++++++++++++++ .../haplotypecaller/HaplotypeCaller.java | 2 +- ...lexAndSymbolicVariantsIntegrationTest.java | 6 +- .../HaplotypeCallerIntegrationTest.java | 18 +-- 4 files changed, 139 insertions(+), 13 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java new file mode 100644 index 000000000..9bd641011 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java @@ -0,0 +1,126 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. 
+* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. 
The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotatorCompatible; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.GenotypeAnnotation; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.Genotype; +import org.broadinstitute.variant.variantcontext.GenotypeBuilder; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.vcf.VCFConstants; +import org.broadinstitute.variant.vcf.VCFFormatHeaderLine; +import org.broadinstitute.variant.vcf.VCFStandardHeaderLines; + +import java.util.*; + + +/** + * The depth of coverage of each allele per sample + * + * the depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot + * differentiate between reads that align over the event but aren't informative vs. those that aren't even + * close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). 
+ */ +public class DepthPerSampleHC extends GenotypeAnnotation { + public void annotate(final RefMetaDataTracker tracker, + final AnnotatorCompatible walker, + final ReferenceContext ref, + final AlignmentContext stratifiedContext, + final VariantContext vc, + final Genotype g, + final GenotypeBuilder gb, + final PerReadAlleleLikelihoodMap alleleLikelihoodMap) { + if ( g == null || !g.isCalled() || ( stratifiedContext == null && alleleLikelihoodMap == null) ) + return; + + if (alleleLikelihoodMap == null ) + throw new IllegalStateException("DepthPerSampleHC can only be used with likelihood based annotations in the HaplotypeCaller"); + + // the depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot + // differentiate between reads that align over the event but aren't informative vs. those that aren't even + // close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). + int dp = 0; + + if ( alleleLikelihoodMap.isEmpty() ) { + // there are no reads + } else { + final Set alleles = new HashSet<>(vc.getAlleles()); + + // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext + if ( ! alleleLikelihoodMap.getAllelesSet().containsAll(alleles) ) + throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + alleleLikelihoodMap.getAllelesSet()); + + for (Map.Entry> el : alleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles); + if ( a.isInformative() ) { + final GATKSAMRecord read = el.getKey(); + final int incCount = read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + dp += incCount; + } + } + + gb.DP(dp); + } + } + + public List getKeyNames() { + return Collections.singletonList(VCFConstants.DEPTH_KEY); + } + + public List getDescriptions() { + return Collections.singletonList(VCFStandardHeaderLines.getFormatLine(getKeyNames().get(0))); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 73367f8c3..182e59493 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -223,7 +223,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In */ @Advanced @Argument(fullName="annotation", shortName="A", doc="One or more specific annotations to apply to variant calls", required=false) - protected List annotationsToUse = new ArrayList(Arrays.asList(new String[]{"ClippingRankSumTest"})); + protected List annotationsToUse = new ArrayList<>(Arrays.asList(new String[]{"ClippingRankSumTest", "DepthPerSampleHC"})); /** * Which annotations to exclude from output in the VCF file. 
Note that this argument has higher priority than the -A or -G arguments, diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index 3f3b295f8..fba294c3d 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "fc11b553fbf16beac0da04a69f419365"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "8d7728909b1b8eb3f30f2f1583f054a8"); } private void HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "38b4596c3910fdde51ea59aa1a8f848f"); + "db71826dc798ff1cdf0c5d05b0ede976"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "08147870d73d9749ced8cfc7cdd4714f"); + "42831d5463552911b7da9de0b4a27289"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index d0c7228ae..77be9fba2 100644 --- 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -80,12 +80,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "37e462379de17bc6c8aeeed6e9735dd3"); + HCTest(CEUTRIO_BAM, "", "1b15e4647013ab2c3ce7073c420d8640"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "983a0d122714d4aa0ff7af20cc686703"); + HCTest(NA12878_BAM, "", "423be27dc2cf7fd10baf465cf93e18e2"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -96,7 +96,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "627b5a12f2f02a874fb39982171a3982"); + "a28e6f14e28708283d61c1e423bbdcb1"); } @Test @@ -112,7 +112,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "ce602282e80cca6d4272f940e20e90c3"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "8344d86751b707c53b296c297eba4bfa"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -149,7 +149,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "09335c01d2e90714af7f4c91156da0b1"); + HCTestNearbySmallIntervals(NA12878_BAM, "", "dea98f257d39fa1447a12c36a6bbf4a3"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -159,14 
+159,14 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void HCTestProblematicReadsModifiedInActiveRegions() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "haplotype-problem-4.bam") + " --no_cmdline_in_header -o %s -minPruning 3 -L 4:49139026-49139965"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("b34ddc93a7b9919e05da499508f44dd9")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("7cd1c5e2642ae8ddf38932aba1f51d69")); executeTest("HCTestProblematicReadsModifiedInActiveRegions: ", spec); } @Test public void HCTestStructuralIndels() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "AFR.structural.indels.bam") + " --no_cmdline_in_header -o %s -minPruning 6 -L 20:8187565-8187800 -L 20:18670537-18670730"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("98a78b9f58ab197b827ef2ce3ab043d3")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("ee55ff4c6ec1bbef88e21cc0f45d4c47")); executeTest("HCTestStructuralIndels: ", spec); } @@ -188,7 +188,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("6e6ef6e0326bee6d20d9fd37349fdb8c")); + Arrays.asList("4886a98bf699f4e7f4491160749ada6a")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -196,7 +196,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " 
--no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("5e535983b2f7e5fb6c84fecffa092324")); + Arrays.asList("86bdd07a3ac4f6ce239c30efea8bf5ba")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } } From 00c06e9e52f416599bb9b906c32857848a9abd39 Mon Sep 17 00:00:00 2001 From: Michael McCowan Date: Tue, 4 Jun 2013 10:08:24 -0400 Subject: [PATCH 36/99] Performance improvements: - Memoized MathUtil's cumulative binomial probability function. - Reduced the default size of the read name map in reduced reads and handle its resets more efficiently. --- .../compression/reducereads/ReduceReads.java | 17 +++- .../broadinstitute/sting/utils/MathUtils.java | 77 +++++++++++++++---- .../sting/utils/MathUtilsUnitTest.java | 45 +++++++++-- 3 files changed, 112 insertions(+), 27 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java index eb55701ae..e636f8f17 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java @@ -273,8 +273,9 @@ public class ReduceReads extends ReadWalker, Redu int nCompressedReads = 0; - Object2LongOpenHashMap readNameHash; // This hash will keep the name of the original read the new compressed name (a number). + private static int READ_NAME_HASH_DEFAULT_SIZE = 1000; Long nextReadNumber = 1L; // The next number to use for the compressed read name. + Object2LongOpenHashMap readNameHash; // This hash will keep the name of the original read the new compressed name (a number). 
ObjectSortedSet intervalList; @@ -313,7 +314,7 @@ public class ReduceReads extends ReadWalker, Redu knownSnpPositions = new ObjectAVLTreeSet(); GenomeAnalysisEngine toolkit = getToolkit(); - readNameHash = new Object2LongOpenHashMap(100000); // prepare the read name hash to keep track of what reads have had their read names compressed + this.resetReadNameHash(); // prepare the read name hash to keep track of what reads have had their read names compressed intervalList = new ObjectAVLTreeSet(); // get the interval list from the engine. If no interval list was provided, the walker will work in WGS mode if (toolkit.getIntervals() != null) @@ -335,6 +336,16 @@ public class ReduceReads extends ReadWalker, Redu } } + /** Initializer for {@link #readNameHash}. */ + private void resetReadNameHash() { + // If the hash grows large, subsequent clear operations can be very expensive, so trim the hash down if it grows beyond its default. + if (readNameHash == null || readNameHash.size() > READ_NAME_HASH_DEFAULT_SIZE) { + readNameHash = new Object2LongOpenHashMap(READ_NAME_HASH_DEFAULT_SIZE); + } else { + readNameHash.clear(); + } + } + /** * Takes in a read and prepares it for the SlidingWindow machinery by performing the * following optional clipping operations: @@ -471,7 +482,7 @@ public class ReduceReads extends ReadWalker, Redu // stash.compress(), the readNameHash can be cleared after the for() loop above. // The advantage of clearing the hash is that otherwise it holds all reads that have been encountered, // which can use a lot of memory and cause RR to slow to a crawl and/or run out of memory. 
- readNameHash.clear(); + this.resetReadNameHash(); } } else diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index dfd3537da..07aff5983 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -29,9 +29,8 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; -import org.broadinstitute.sting.utils.exceptions.UserException; -import java.lang.IllegalArgumentException; +import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.*; @@ -417,9 +416,35 @@ public class MathUtils { return log10BinomialCoefficient(n, k) + (n * FAIR_BINOMIAL_PROB_LOG10_0_5); } + /** A memoization container for {@link #binomialCumulativeProbability(int, int, int)}. Synchronized to accommodate multithreading. */ + private static final Map BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE = + Collections.synchronizedMap(new LRUCache(10_000)); + + /** + * Primitive integer-triplet bijection into long. Returns null when the bijection function fails (in lieu of an exception), which will + * happen when: any value is negative or larger than a short. This method is optimized for speed; it is not intended to serve as a + * utility function. 
+ */ + @Nullable + static Long fastGenerateUniqueHashFromThreeIntegers(final int one, final int two, final int three) { + if (one < 0 || two < 0 || three < 0 || Short.MAX_VALUE < one || Short.MAX_VALUE < two || Short.MAX_VALUE < three) { + return null; + } else { + long result = 0; + result += (short) one; + result <<= 16; + result += (short) two; + result <<= 16; + result += (short) three; + return result; + } + } + /** * Performs the cumulative sum of binomial probabilities, where the probability calculation is done in log space. * Assumes that the probability of a successful hit is fair (i.e. 0.5). + * + * This pure function is memoized because of its expensive BigDecimal calculations. * * @param n number of attempts for the number of hits * @param k_start start (inclusive) of the cumulant sum (over hits) @@ -430,23 +455,41 @@ public class MathUtils { if ( k_end > n ) throw new IllegalArgumentException(String.format("Value for k_end (%d) is greater than n (%d)", k_end, n)); - double cumProb = 0.0; - double prevProb; - BigDecimal probCache = BigDecimal.ZERO; - - for (int hits = k_start; hits <= k_end; hits++) { - prevProb = cumProb; - final double probability = binomialProbability(n, hits); - cumProb += probability; - if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision - probCache = probCache.add(new BigDecimal(prevProb)); - cumProb = 0.0; - hits--; // repeat loop - // prevProb changes at start of loop - } + // Fetch cached value, if applicable. 
+ final Long memoizationKey = fastGenerateUniqueHashFromThreeIntegers(n, k_start, k_end); + final Double memoizationCacheResult; + if (memoizationKey != null) { + memoizationCacheResult = BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.get(memoizationKey); + } else { + memoizationCacheResult = null; } - return probCache.add(new BigDecimal(cumProb)).doubleValue(); + final double result; + if (memoizationCacheResult != null) { + result = memoizationCacheResult; + } else { + double cumProb = 0.0; + double prevProb; + BigDecimal probCache = BigDecimal.ZERO; + + for (int hits = k_start; hits <= k_end; hits++) { + prevProb = cumProb; + final double probability = binomialProbability(n, hits); + cumProb += probability; + if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision + probCache = probCache.add(new BigDecimal(prevProb)); + cumProb = 0.0; + hits--; // repeat loop + // prevProb changes at start of loop + } + } + + result = probCache.add(new BigDecimal(cumProb)).doubleValue(); + if (memoizationKey != null) { + BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.put(memoizationKey, result); + } + } + return result; } /** diff --git a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java index e4c74a0ad..3933b3830 100644 --- a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java @@ -41,6 +41,35 @@ public class MathUtilsUnitTest extends BaseTest { public void init() { } + /** + * Tests that we get unique values for the valid (non-null-producing) input space for {@link MathUtils#fastGenerateUniqueHashFromThreeIntegers(int, int, int)}. 
+ */ + @Test + public void testGenerateUniqueHashFromThreePositiveIntegers() { + logger.warn("Executing testGenerateUniqueHashFromThreePositiveIntegers"); + + final Set observedLongs = new HashSet(); + for (short i = 0; i < Byte.MAX_VALUE; i++) { + for (short j = 0; j < Byte.MAX_VALUE; j++) { + for (short k = 0; k < Byte.MAX_VALUE; k++) { + final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k); + //System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong)); + Assert.assertTrue(observedLongs.add(aLong)); + } + } + } + + for (short i = Byte.MAX_VALUE; i <= Short.MAX_VALUE && i > 0; i += 128) { + for (short j = Byte.MAX_VALUE; j <= Short.MAX_VALUE && j > 0; j += 128) { + for (short k = Byte.MAX_VALUE; k <= Short.MAX_VALUE && k > 0; k += 128) { + final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k); + // System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong)); + Assert.assertTrue(observedLongs.add(aLong)); + } + } + } + } + /** * Tests that we get the right values from the binomial distribution */ @@ -64,13 +93,15 @@ public class MathUtilsUnitTest extends BaseTest { public void testCumulativeBinomialProbability() { logger.warn("Executing testCumulativeBinomialProbability"); - final int numTrials = 10; - for ( int i = 0; i < numTrials; i++ ) - Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials)); - - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7); - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7); - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7); + for (int j = 0; j < 2; j++) { // Test memoizing functionality, as well. 
+ final int numTrials = 10; + for ( int i = 0; i < numTrials; i++ ) + Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials)); + + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7); + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7); + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7); + } } /** From 96073c30587a4061eed5950897790d32a178aef6 Mon Sep 17 00:00:00 2001 From: Valentin Ruano-Rubio Date: Thu, 23 May 2013 20:39:32 -0400 Subject: [PATCH 37/99] This commit addresses JIRA issue GSA-948: Prevent users from doing the wrong thing with RNA-Seq data and the GATK. The previous behavior is to process reads with N CIGAR operators as they are despite that many of the tools do not actually support such operator and results become unpredictable. Now if there is some read with the N operator, the engine returns a user exception. The error message indicates what is the problem (including the offending read and mapping position) and gives a couple of alternatives that the user can take in order to move forward: a) ask for those reads to be filtered out (with --filter_reads_with_N_cigar or -filterRNC) b) keep them in as before (with -U ALLOW_N_CIGAR_READS or -U ALL) Notice that (b) does not have any effect if (a) is enacted; i.e. filtering overrides ignoring. Implementation: * Added filterReadsWithNCigar argument to MalformedReadFilter with the corresponding changes in the code to get it to work. * Added ALLOW_N_CIGAR_READS unsafe flag so that N cigar containing reads can be processed as they are if that is what the user wants. * Added ReadFilterTest class common parent for ReadFilter test cases. * Refactor ReadGroupBlackListFilterUnitTest to extend ReadFilterTest and push up some functionality to that class. 
* Modified MalformedReadFilterUnitTest to extend ReadFilterTest and to test the new filter functionality. * Added AllowNCigarMalformedReadFilterUnittest to check on the behavior when the unsafe ALLOW_N_CIGAR_READS flag is used. * Added UnsafeNCigarMalformedReadFilterUnittest to check on the behavior when the unsafe ALL flag is used. * Updated a broken test case in UnifiedGenotyperIntegrationTest resulting from the new behavior. * Updated EngineFeaturesIntegrationTest testdata to be compliant with new behavior --- .../UnifiedGenotyperIntegrationTest.java | 5 +- .../gatk/arguments/ValidationExclusion.java | 2 + .../gatk/filters/MalformedReadFilter.java | 118 +++++- .../sting/utils/exceptions/UserException.java | 14 + .../gatk/EngineFeaturesIntegrationTest.java | 1 + ...llowNCigarMalformedReadFilterUnitTest.java | 77 ++++ .../filters/MalformedReadFilterUnitTest.java | 190 ++++++++- .../sting/gatk/filters/ReadFilterTest.java | 370 ++++++++++++++++++ .../ReadGroupBlackListFilterUnitTest.java | 88 ++--- .../UnsafeMalformedReadFilterUnitTest.java | 50 +++ 10 files changed, 836 insertions(+), 79 deletions(-) create mode 100644 public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java create mode 100644 public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java create mode 100644 public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java index d55a923dc..300d7f5da 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java @@ -288,9 +288,10 @@ public class UnifiedGenotyperIntegrationTest extends 
WalkerTest { @Test public void testNsInCigar() { - WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "testWithNs.bam -o %s -L 8:141813600-141813700 -out_mode EMIT_ALL_SITES", 1, - Arrays.asList("2ae3fd39c53a6954d32faed8703adfe8")); + UserException.UnsupportedCigarOperatorException.class); + executeTest("test calling on reads with Ns in CIGAR", spec); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java index f8f56f89e..75a68d978 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java @@ -36,6 +36,8 @@ public class ValidationExclusion { // our validation options public enum TYPE { + ALLOW_N_CIGAR_READS, // ignore the presence of N operators in CIGARs: do not blow up and process reads that contain one or more N operators. + // This exclusion does not have effect on reads that get filtered {@see MalformedReadFilter}. 
ALLOW_UNINDEXED_BAM, // allow bam files that do not have an index; we'll traverse them using monolithic shard ALLOW_UNSET_BAM_SORT_ORDER, // assume that the bam is sorted, even if the SO (sort-order) flag is not set NO_READ_ORDER_VERIFICATION, // do not validate that the reads are in order as we take them from the bam file diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java index f7d1d0297..a15870a22 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java @@ -25,14 +25,16 @@ package org.broadinstitute.sting.gatk.filters; -import net.sf.samtools.SAMFileHeader; -import net.sf.samtools.SAMRecord; -import net.sf.samtools.SAMSequenceRecord; -import net.sf.samtools.SAMTagUtil; +import net.sf.samtools.*; import org.broadinstitute.sting.commandline.Argument; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; import org.broadinstitute.sting.utils.exceptions.UserException; +import java.util.Collections; + /** * Filter out malformed reads. 
* @@ -40,20 +42,46 @@ import org.broadinstitute.sting.utils.exceptions.UserException; * @version 0.1 */ public class MalformedReadFilter extends ReadFilter { + + + private static final String FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME = "filter_reads_with_N_cigar" ; + private SAMFileHeader header; + @Argument(fullName = FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME, shortName = "filterRNC", doc = "filter out reads with CIGAR containing the N operator, instead of stop processing and report an error.", required = false) + boolean filterReadsWithNCigar = false; + + @Argument(fullName = "filter_mismatching_base_and_quals", shortName = "filterMBQ", doc = "if a read has mismatching number of bases and base qualities, filter out the read instead of blowing up.", required = false) boolean filterMismatchingBaseAndQuals = false; @Argument(fullName = "filter_bases_not_stored", shortName = "filterNoBases", doc = "if a read has no stored bases (i.e. a '*'), filter out the read instead of blowing up.", required = false) boolean filterBasesNotStored = false; + /** + * Indicates the applicable validation exclusions + */ + private boolean allowNCigars; + @Override - public void initialize(GenomeAnalysisEngine engine) { - this.header = engine.getSAMFileHeader(); + public void initialize(final GenomeAnalysisEngine engine) { + header = engine.getSAMFileHeader(); + ValidationExclusion validationExclusions = null; + final SAMDataSource rds = engine.getReadsDataSource(); + if (rds != null) { + final ReadProperties rps = rds.getReadsInfo(); + if (rps != null) { + validationExclusions = rps.getValidationExclusionList(); + } + } + if (validationExclusions == null) { + allowNCigars = false; + } else { + allowNCigars = validationExclusions.contains(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS); + } } - public boolean filterOut(SAMRecord read) { + public boolean filterOut(final SAMRecord read) { // slowly changing the behavior to blow up first and filtering out if a parameter is explicitly 
provided return !checkInvalidAlignmentStart(read) || !checkInvalidAlignmentEnd(read) || @@ -61,7 +89,8 @@ public class MalformedReadFilter extends ReadFilter { !checkHasReadGroup(read) || !checkMismatchingBasesAndQuals(read, filterMismatchingBaseAndQuals) || !checkCigarDisagreesWithAlignment(read) || - !checkSeqStored(read, filterBasesNotStored); + !checkSeqStored(read, filterBasesNotStored) || + !checkCigarIsSupported(read,filterReadsWithNCigar,allowNCigars); } private static boolean checkHasReadGroup(final SAMRecord read) { @@ -80,7 +109,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if read start is valid, false otherwise. */ - private static boolean checkInvalidAlignmentStart( SAMRecord read ) { + private static boolean checkInvalidAlignmentStart(final SAMRecord read ) { // read is not flagged as 'unmapped', but alignment start is NO_ALIGNMENT_START if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START ) return false; @@ -95,7 +124,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if read end is valid, false otherwise. */ - private static boolean checkInvalidAlignmentEnd( SAMRecord read ) { + private static boolean checkInvalidAlignmentEnd(final SAMRecord read ) { // Alignment aligns to negative number of bases in the reference. if( !read.getReadUnmappedFlag() && read.getAlignmentEnd() != -1 && (read.getAlignmentEnd()-read.getAlignmentStart()+1)<0 ) return false; @@ -108,11 +137,11 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to verify. * @return true if alignment agrees with header, false othrewise. 
*/ - private static boolean checkAlignmentDisagreesWithHeader( SAMFileHeader header, SAMRecord read ) { + private static boolean checkAlignmentDisagreesWithHeader(final SAMFileHeader header, final SAMRecord read ) { // Read is aligned to nonexistent contig if( read.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START ) return false; - SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() ); + final SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() ); // Read is aligned to a point after the end of the contig if( !read.getReadUnmappedFlag() && read.getAlignmentStart() > contigHeader.getSequenceLength() ) return false; @@ -124,7 +153,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if cigar agrees with alignment, false otherwise. */ - private static boolean checkCigarDisagreesWithAlignment(SAMRecord read) { + private static boolean checkCigarDisagreesWithAlignment(final SAMRecord read) { // Read has a valid alignment start, but the CIGAR string is empty if( !read.getReadUnmappedFlag() && read.getAlignmentStart() != -1 && @@ -134,13 +163,72 @@ public class MalformedReadFilter extends ReadFilter { return true; } + /** + * Check for unsupported CIGAR operators. + * Currently the N operator is not supported. + * @param read The read to validate. + * @param filterReadsWithNCigar whether the offending read should just + * be silently filtered or not. + * @param allowNCigars whether reads that contain N operators in their CIGARs + * can be processed or an exception should be thrown instead. + * @throws UserException.UnsupportedCigarOperatorException + * if {@link #filterReadsWithNCigar} is false and + * the input read has some unsupported operation. + * @return true if the read CIGAR operations are + * fully supported, otherwise false, as long as + * no exception has been thrown. 
+ */ + private static boolean checkCigarIsSupported(final SAMRecord read, final boolean filterReadsWithNCigar, final boolean allowNCigars) { + if( containsNOperator(read)) { + if (! filterReadsWithNCigar && !allowNCigars) { + throw new UserException.UnsupportedCigarOperatorException( + CigarOperator.N,read, + "Perhaps you are" + + " trying to use RNA-Seq data?" + + " While we are currently actively working to" + + " support this data type unfortunately the" + + " GATK cannot be used with this data in its" + + " current form. You have the option of either" + + " filtering out all reads with operator " + + CigarOperator.N + " in their CIGAR string" + + " (please add --" + + FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME + + " to your command line) or" + + " assume the risk of processing those reads as they" + + " are including the pertinent unsafe flag (please add -U" + + ' ' + ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS + + " to your command line). Notice however that if you were" + + " to choose the latter, an unspecified subset of the" + + " analytical outputs of an unspecified subset of the tools" + + " will become unpredictable. Consequently the GATK team" + + " might well not be able to provide you with the usual support" + + " with any issue regarding any output"); + } + return ! filterReadsWithNCigar; + } + return true; + } + + private static boolean containsNOperator(final SAMRecord read) { + final Cigar cigar = read.getCigar(); + if (cigar == null) { + return false; + } + for (final CigarElement ce : cigar.getCigarElements()) { + if (ce.getOperator() == CigarOperator.N) { + return true; + } + } + return false; + } + /** * Check if the read has the same number of bases and base qualities * @param read the read to validate * @return true if they have the same number. False otherwise. 
*/ - private static boolean checkMismatchingBasesAndQuals(SAMRecord read, boolean filterMismatchingBaseAndQuals) { - boolean result; + private static boolean checkMismatchingBasesAndQuals(final SAMRecord read, final boolean filterMismatchingBaseAndQuals) { + final boolean result; if (read.getReadLength() == read.getBaseQualities().length) result = true; else if (filterMismatchingBaseAndQuals) diff --git a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java index 3abe5a7f4..0e95fd158 100644 --- a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java +++ b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java @@ -25,6 +25,7 @@ package org.broadinstitute.sting.utils.exceptions; +import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMSequenceDictionary; @@ -87,6 +88,19 @@ public class UserException extends ReviewedStingException { } } + public static class UnsupportedCigarOperatorException extends UserException { + public UnsupportedCigarOperatorException(final CigarOperator co, final SAMRecord read, final String message) { + super(String.format( + "Unsupported CIGAR operator %s in read %s at %s:%d. 
%s", + co, + read.getReadName(), + read.getReferenceName(), + read.getAlignmentStart(), + message)); + } + } + + public static class MalformedGenomeLoc extends UserException { public MalformedGenomeLoc(String message, GenomeLoc loc) { super(String.format("Badly formed genome loc: %s: %s", message, loc)); diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index 6cfa90d90..b5b82f869 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -131,6 +131,7 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { final String root = "-T ErrorThrowing -R " + exampleFASTA; final String args = root + cfg.args + " -E " + cfg.expectedException.getSimpleName(); WalkerTestSpec spec = new WalkerTestSpec(args, 0, cfg.expectedException); + executeTest(cfg.toString(), spec); } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java new file mode 100644 index 000000000..d169bf7e9 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java @@ -0,0 +1,77 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission 
notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + + +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Collections; + + +/** + * Tests for the {@link MalformedReadFilter} when the unsafe flag + * {@link ValidationExclusion.TYPE#ALLOW_N_CIGAR_READS} is set. + * + * @author Valentin Ruano-Rubio + * @since 6/6/13 + */ +public class AllowNCigarMalformedReadFilterUnitTest extends MalformedReadFilterUnitTest { + + + @Override + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS)); + } + + + @Test(enabled = true, + dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.IGNORE) + public void testCigarNOperatorFilterIgnore(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertFalse(filter.filterOut(nContainingCigarRead), + "filters out N containing Cigar when it should ignore the fact"); + } + + @Test(enabled = false) + @Override + public void testCigarNOperatorFilterException(final String cigarString) { + // Nothing to do here. + // Just deactivates the parents test case. 
+ } + + + + + + + +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java index 981d54d54..0d8515dde 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java @@ -25,11 +25,25 @@ package org.broadinstitute.sting.gatk.filters; -import org.broadinstitute.sting.utils.exceptions.UserException; + +import net.sf.samtools.Cigar; +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMRecord; +import net.sf.samtools.TextCigarCodec; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.exceptions.UserException.UnsupportedCigarOperatorException; + +import java.lang.annotation.*; +import java.lang.reflect.Method; +import java.util.*; /** @@ -38,14 +52,14 @@ import org.testng.annotations.Test; * @author Eric Banks * @since 3/14/13 */ -public class MalformedReadFilterUnitTest { +public class MalformedReadFilterUnitTest extends ReadFilterTest { ////////////////////////////////////// // Test the checkSeqStored() method // ////////////////////////////////////// @Test(enabled = true) - public void testcheckSeqStored () { + public void testCheckSeqStored () { final GATKSAMRecord goodRead = ArtificialSAMUtils.createArtificialRead(new byte[]{(byte)'A'}, new byte[]{(byte)'A'}, "1M"); final GATKSAMRecord badRead = 
ArtificialSAMUtils.createArtificialRead(new byte[]{}, new byte[]{}, "1M"); @@ -59,4 +73,174 @@ public class MalformedReadFilterUnitTest { Assert.assertTrue(false, "We should have exceptioned out in the previous line"); } catch (UserException e) { } } + + @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.FILTER) + public void testCigarNOperatorFilterTruePositive(String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(true); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertTrue(filter.filterOut(nContainingCigarRead), + " Did not filtered out a N containing CIGAR read"); + } + + @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT) + public void testCigarNOperatorFilterTrueNegative(String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(true); + final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertFalse(filter.filterOut(nonNContainingCigarRead), + " Filtered out a non-N containing CIGAR read"); + } + + @Test(enabled = true, + expectedExceptions = UnsupportedCigarOperatorException.class, + dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.EXCEPTION) + public void testCigarNOperatorFilterException(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + + filter.filterOut(nContainingCigarRead); + } + + @Test(enabled = true, dataProvider="UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT) + public void testCigarNOperatorFilterControl(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nonNContainingCigarRead = 
buildSAMRecord(cigarString); + + Assert.assertFalse(filter.filterOut(nonNContainingCigarRead)); + } + + protected SAMRecord buildSAMRecord(final String cigarString) { + final Cigar nContainingCigar = TextCigarCodec.getSingleton().decode(cigarString); + return this.createRead(nContainingCigar, 1, 0, 10); + } + + protected MalformedReadFilter buildMalformedReadFilter(final boolean filterRNO) { + return buildMalformedReadFiter(filterRNO,new ValidationExclusion.TYPE[] {}); + } + + protected MalformedReadFilter buildMalformedReadFiter(boolean filterRNO, final ValidationExclusion.TYPE... excl) { + final ValidationExclusion ve = new ValidationExclusion(Arrays.asList(excl)); + + final MalformedReadFilter filter = new MalformedReadFilter(); + + final SAMFileHeader h = getHeader(); + final SAMDataSource ds = getDataSource(); + + final GenomeAnalysisEngine gae = new GenomeAnalysisEngine() { + @Override + public SAMFileHeader getSAMFileHeader() { + return h; + } + + @Override + public SAMDataSource getReadsDataSource() { + return ds; + } + }; + filter.initialize(gae); + filter.filterReadsWithNCigar = filterRNO; + return filter; + } + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.METHOD) + @Inherited + protected @interface CigarOperatorTest { + + enum Outcome { + ANY,ACCEPT,FILTER,EXCEPTION,IGNORE; + + public boolean appliesTo (String cigar) { + boolean hasN = cigar.indexOf('N') != -1; + switch (this) { + case ANY: return true; + case ACCEPT: return !hasN; + case IGNORE: return hasN; + case FILTER: + case EXCEPTION: + default: + return hasN; + + } + } + } + + Outcome value() default Outcome.ANY; + } + + /** + * Cigar test data for unsupported operator test. + * Each element of this array corresponds to a test case. 
In turn the first element of the test case array is the + * Cigar string for that test case and the second indicates whether it should be filtered due to the presence of a + * unsupported operator + */ + private static final String[] TEST_CIGARS = { + "101M10D20I10M", + "6M14N5M", + "1N", + "101M", + "110N", + "2N4M", + "4M2N", + "3M1I1M", + "1M2I2M", + "1M10N1I1M", + "1M1I1D", + "11N12M1I34M12N" + }; + + @DataProvider(name= "UnsupportedCigarOperatorDataProvider") + public Iterator unsupportedOperatorDataProvider(final Method testMethod) { + final CigarOperatorTest a = resolveCigarOperatorTestAnnotation(testMethod); + final List result = new LinkedList(); + for (final String cigarString : TEST_CIGARS) { + if (a == null || a.value().appliesTo(cigarString)) { + result.add(new Object[] { cigarString }); + } + } + return result.iterator(); + } + + /** + * Gets the most specific {@link CigarOperatorTest} annotation for the + * signature of the test method provided. + *

+ * This in-house implementation is required due to the fact that method + * annotations do not have inheritance. + * + * @param m targeted test method. + * @return null if there is no {@link CigarOperatorTest} + * annotation in this or overridden methods. + */ + private CigarOperatorTest resolveCigarOperatorTestAnnotation(final Method m) { + CigarOperatorTest res = m.getAnnotation(CigarOperatorTest.class); + if (res != null) { + return res; + } + Class c = this.getClass(); + Class p = c.getSuperclass(); + while (p != null && p != Object.class) { + try { + final Method met = p.getDeclaredMethod(m.getName(), + m.getParameterTypes()); + res = met.getAnnotation(CigarOperatorTest.class); + if (res != null) { + break; + } + } catch (NoSuchMethodException e) { + // Its ok; nothing to do here, just keep looking. + } + c = p; + p = c.getSuperclass(); + } + return res; + } + } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java new file mode 100644 index 000000000..5b6f67c42 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java @@ -0,0 +1,370 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; +import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID; +import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; + +import java.util.*; + +/** + * Class ReadBaseTest + *

+ This is the base test class for read filter test classes. All read + * filter test cases should extend from this + * class; it sets up a header mock up to test read filtering. + * + * Feel free to override non-final methods to modify the behavior + * (i.e. change how read group ids are formatted, or complete a header). + * + *

+ You can statically determine the number of read-groups involved + * in the test by calling {@link #ReadFilterTest(int)} in your constructor. + *

+ * + * Notice that the same header object is shared by all test and + * it is initialized by Junit (calling {@link #beforeClass()}. + * + * @author Valentin Ruano Rubio + * @date May 23, 2013 + */ +public class ReadFilterTest extends BaseTest { + + private static final int DEFAULT_READ_GROUP_COUNT = 5; + private static final int DEFAULT_READER_COUNT = 1; + private static final String DEFAULT_READ_GROUP_PREFIX = "ReadGroup"; + private static final String DEFAULT_PLATFORM_UNIT_PREFIX = "Lane"; + private static final String DEFAULT_SAMPLE_NAME_PREFIX = "Sample"; + private static final String DEFAULT_PLATFORM_PREFIX = "Platform"; + private static final int DEFAULT_CHROMOSOME_COUNT = 1; + private static final int DEFAULT_CHROMOSOME_START_INDEX = 1; + private static final int DEFAULT_CHROMOSOME_SIZE = 1000; + private static final String DEFAULT_SAM_FILE_FORMAT = "readfile-%3d.bam"; + + private final int groupCount; + + private SAMFileHeader header; + + private SAMDataSource dataSource; + + /** + * Constructs a new read-filter test providing the number of read + * groups in the file. + * + * @param groupCount number of read-group in the fictional SAM file, + * must be equal or greater than 1. + */ + protected ReadFilterTest(final int groupCount) { + if (groupCount < 1) { + throw new IllegalArgumentException( + "the read group count must at least be 1"); + } + this.groupCount = groupCount; + } + + + /** + * Gets the data source. + * + * @throws IllegalStateException if the data source was not initialized + * invoking {@link #beforeClass()} + * @return never null + */ + protected final SAMDataSource getDataSource() { + checkDataSourceExists(); + return dataSource; + } + + /** + * Returns the mock-up SAM file header for testing. 
+ * + * @throws IllegalStateException if the header was not initialized + * invoking {@link #beforeClass()} + * @return never null + */ + protected final SAMFileHeader getHeader() { + checkHeaderExists(); + return header; + } + + /** + * Construct a read filter test with the default number of groups + * ({@link #DEFAULT_READ_GROUP_COUNT}. + */ + public ReadFilterTest() { + this(DEFAULT_READ_GROUP_COUNT); + } + + /** + * Return the number of read groups involved in the test + * @return 1 or greater. + */ + protected final int getReadGroupCount() { + return groupCount; + } + + /** + * Composes the Id for the read group given its index. + * + * This methods must return a unique distinct ID for each possible index and + * it must be the same value each time it is invoked. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null and must be unique to each possible + * read group index. + */ + protected String composeReadGroupId(final int index) { + checkReadGroupIndex(index); + return DEFAULT_READ_GROUP_PREFIX + index; + } + + /** + * Composes the Platform name for the read group given its index. + * + * This method must always return the same value give an index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected String composePlatformName(final int index) { + checkReadGroupIndex(index); + return DEFAULT_PLATFORM_PREFIX + (((index-1)%2)+1); + } + + + /** + * Composes the Platform unit name for the read group given its index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected String composePlatformUnitName(final int index) { + checkReadGroupIndex(index); + return DEFAULT_PLATFORM_UNIT_PREFIX + (((index-1)%3)+1); + } + + + + /** + * Checks the correctness of a given read group index. 
+ * + * A correct index is any value in the range [1,{@link #getReadGroupCount()}]. + * + * @param index the target index. + * @throws IllegalArgumentException if the input index is not correct. + */ + protected final void checkReadGroupIndex(final int index) { + checkIndex(index,groupCount,"read group"); + } + + + private void checkIndex(final int index, final int max, CharSequence name) { + if (index < 1 || index > max) { + throw new IllegalArgumentException( + name + " index (" + + index + + ") is out of bounds [1," + max + "]"); + } + } + + + /** + * Checks whether the header was initialized. + * + * @throws IllegalStateException if the header was not yet initialized. + */ + protected final void checkHeaderExists() { + if (header == null) { + throw new IllegalArgumentException( + "header has not been initialized;" + + " beforeClass() was not invoked"); + } + } + + /** + * Checks whether the data source was initialized. + * + * @throws IllegalStateException if the data source was not yet initialized. + */ + protected final void checkDataSourceExists() { + if (header == null) { + throw new IllegalArgumentException( + "data source has not been initialized;" + + " beforeClass() was not invoked"); + } + } + + /** + * Returns the ID for a read group given its index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null and must be unique to each + * possible read group index. + */ + protected final String getReadGroupId(final int index) { + checkReadGroupIndex(index); + return getHeader().getReadGroups().get(index - 1).getReadGroupId(); + } + + /** + * Returns the platform name for a read group given its index. + * + * @param group the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. 
+ */ + protected final String getPlatformName(final int group) { + checkReadGroupIndex(group); + return getHeader().getReadGroups().get(group - 1).getPlatform(); + } + + /** + * Returns the platform unit for a read group given its index. + * + * @param group the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected final String getPlatformUnit(final int group) { + checkReadGroupIndex(group); + return getHeader().getReadGroups().get(group - 1).getPlatformUnit(); + } + + + /** + * Composes the mock up SAM file header. + * + * It must return an equivalent (equal) value each time it is invoked. + * + * @return never null. + */ + protected SAMFileHeader composeHeader() { + + return ArtificialSAMUtils.createArtificialSamHeader( + DEFAULT_CHROMOSOME_COUNT, DEFAULT_CHROMOSOME_START_INDEX, + DEFAULT_CHROMOSOME_SIZE); + } + + @BeforeClass + public void beforeClass() { + + header = composeHeader(); + dataSource = composeDataSource(); + final List readGroupIDs = new ArrayList(); + final List sampleNames = new ArrayList(); + + for (int i = 1; i <= getReadGroupCount(); i++) { + final String readGroupId = composeReadGroupId(i); + readGroupIDs.add(readGroupId); + sampleNames.add(readGroupId); + } + + ArtificialSAMUtils.createEnumeratedReadGroups( + header, readGroupIDs, sampleNames); + + for (int i = 1; i <= getReadGroupCount(); i++) { + final String readGroupId = readGroupIDs.get(i-1); + final SAMReadGroupRecord groupRecord = header.getReadGroup(readGroupId); + groupRecord.setAttribute("PL", composePlatformName(i)); + groupRecord.setAttribute("PU", composePlatformUnitName(i)); + } + + } + + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(); + } + + protected SAMDataSource composeDataSource() { + checkHeaderExists(); + final Set readerIDs = new HashSet<>(1); + final ThreadAllocation ta = new ThreadAllocation(); + final Integer numFileHandles = 1; // I believe that 
any value would do but need to confirm. + final boolean useOriginalBaseQualities = true; + final SAMFileReader.ValidationStringency strictness = SAMFileReader.ValidationStringency.LENIENT; + final Integer readBufferSize = 1; // not relevant. + final DownsamplingMethod downsamplingMethod = DownsamplingMethod.NONE; + final ValidationExclusion exclusionList = composeValidationExclusion(); + final Collection supplementalFilters = Collections.EMPTY_SET; + final boolean includeReadsWithDeletionAtLoci = true; + + final GenomeLocParser glp = new GenomeLocParser(header.getSequenceDictionary()); + final SAMDataSource res = new SAMDataSource( + readerIDs, + ta, + numFileHandles, + glp, + useOriginalBaseQualities, + strictness, + readBufferSize, + downsamplingMethod, + exclusionList, + supplementalFilters, + includeReadsWithDeletionAtLoci); + + return res; + } + + @AfterClass + public void afterClass() { + header = null; + dataSource = null; + } + + /** + * Creates a read record. + * + * @param cigar the new record CIGAR. 
+ * @param group the new record group index that must be in the range \ + * [1,{@link #getReadGroupCount()}] + * @param reference the reference sequence index (0-based) + * @param start the start position of the read alignment in the reference + * (1-based) + * @return never null + */ + protected SAMRecord createRead(final Cigar cigar, final int group, final int reference, final int start) { + final SAMRecord record = ArtificialSAMUtils.createArtificialRead(cigar); + record.setHeader(getHeader()); + record.setAlignmentStart(start); + record.setReferenceIndex(reference); + record.setAttribute(SAMTag.RG.toString(), getReadGroupId(group)); + return record; + + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java index 1370aeb50..1be31b293 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java @@ -26,13 +26,10 @@ package org.broadinstitute.sting.gatk.filters; import org.testng.Assert; -import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMReadGroupRecord; @@ -40,34 +37,7 @@ import java.util.List; import java.util.ArrayList; import java.util.Collections; -public class ReadGroupBlackListFilterUnitTest extends BaseTest { - private static final int READ_GROUP_COUNT = 5; - private static final String READ_GROUP_PREFIX = "ReadGroup"; - private static final String SAMPLE_NAME_PREFIX = "Sample"; - private static final String PLATFORM_PREFIX = "Platform"; - private static final String 
PLATFORM_UNIT_PREFIX = "Lane"; - private static SAMFileHeader header; - - @BeforeClass - public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); - - List readGroupIDs = new ArrayList(); - List sampleNames = new ArrayList(); - - for (int i = 1; i <= READ_GROUP_COUNT; i++) { - readGroupIDs.add(READ_GROUP_PREFIX + i); - sampleNames.add(SAMPLE_NAME_PREFIX + i); - } - - ArtificialSAMUtils.createEnumeratedReadGroups(header, readGroupIDs, sampleNames); - - for (int i = 1; i <= READ_GROUP_COUNT; i++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + i); - groupRecord.setAttribute("PL", PLATFORM_PREFIX + (((i-1)%2)+1)); - groupRecord.setAttribute("PU", PLATFORM_UNIT_PREFIX + (((i-1)%3)+1)); - } - } +public class ReadGroupBlackListFilterUnitTest extends ReadFilterTest { @Test(expectedExceptions=ReviewedStingException.class) public void testBadFilter() { @@ -88,14 +58,14 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { @Test public void testFilterReadGroup() { - SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, 1, 20); - filteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "1"); + SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20); + filteredRecord.setAttribute("RG", getReadGroupId(1)); - SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readDos", 0, 2, 20); - unfilteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "2"); + SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20); + unfilteredRecord.setAttribute("RG", getReadGroupId(2)); List filterList = new ArrayList(); - filterList.add("RG:" + READ_GROUP_PREFIX + "1"); + filterList.add("RG:" + getReadGroupId(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); Assert.assertTrue(filter.filterOut(filteredRecord)); @@ -104,14 +74,14 @@ public class 
ReadGroupBlackListFilterUnitTest extends BaseTest { @Test public void testFilterPlatformUnit() { - SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, 1, 20); - filteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "1"); + SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20); + filteredRecord.setAttribute("RG", getReadGroupId(1)); - SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readDos", 0, 2, 20); - unfilteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "2"); + SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20); + unfilteredRecord.setAttribute("RG", getReadGroupId(2)); List filterList = new ArrayList(); - filterList.add("PU:" + PLATFORM_UNIT_PREFIX + "1"); + filterList.add("PU:" + getPlatformUnit(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); Assert.assertTrue(filter.filterOut(filteredRecord)); @@ -123,18 +93,18 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } } List filterList = new ArrayList(); - filterList.add("RG:" + READ_GROUP_PREFIX + "1"); - filterList.add("RG:" + READ_GROUP_PREFIX + "3"); + filterList.add("RG:" + getReadGroupId(1)); + filterList.add("RG:" + 
getReadGroupId(3)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); int filtered = 0; @@ -153,7 +123,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } @@ -163,17 +133,17 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } } List filterList = new ArrayList(); - filterList.add("PU:" + PLATFORM_UNIT_PREFIX + "1"); + filterList.add("PU:" + getPlatformUnit(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); int filtered = 0; @@ -202,10 +172,10 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord 
record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } @@ -231,7 +201,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } @@ -241,10 +211,10 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } @@ -270,7 +240,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java new file mode 100644 index 000000000..30e2f0f1b --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + + +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; + +import java.util.Collections; + + +/** + * Tests for the {@link MalformedReadFilter} when the unsafe flag + * {@link ValidationExclusion.TYPE#ALL} is set. 
+ * + * @author Valentin Ruano-Rubio + * @since 6/6/13 + */ +public class UnsafeMalformedReadFilterUnitTest extends AllowNCigarMalformedReadFilterUnitTest { + + + @Override + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)); + } + + +} From a95fbd48e5712b4785ed3a54a8daadd21729c22f Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Mon, 10 Jun 2013 13:10:32 -0400 Subject: [PATCH 38/99] Moving QualifyMissingIntervals to protected Making this walker available so we can share it with the CSER group for CLIA analysis. --- .../walkers/diagnostics/missing/Metrics.java | 110 +++++++++ .../missing/QualifyMissingIntervals.java | 226 ++++++++++++++++++ 2 files changed, 336 insertions(+) create mode 100644 protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java create mode 100644 protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java new file mode 100644 index 000000000..5e3da5f4f --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java @@ -0,0 +1,110 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.diagnostics.missing; + +/** + * Short one line description of the walker. + *

+ *

+ * [Long description of the walker] + *

+ *

+ *

+ *

Input

+ *

+ * [Description of the Input] + *

+ *

+ *

Output

+ *

+ * [Description of the Output] + *

+ *

+ *

Examples

+ *
+ *    java
+ *      -jar GenomeAnalysisTK.jar
+ *      -T [walker name]
+ *  
+ * + * @author Mauricio Carneiro + * @since 5/1/13 + */ +final class Metrics { + private double gccontent; + private double baseQual; + private double mapQual; + private int reads; + private int refs; + + void reads(int reads) {this.reads = reads;} + void refs(int refs) {this.refs = refs;} + + void gccontent(double gccontent) {this.gccontent = gccontent;} + void baseQual(double baseQual) {this.baseQual = baseQual;} + void mapQual(double mapQual) {this.mapQual = mapQual;} + + double gccontent() {return refs > 0 ? gccontent/refs : 0.0;} + double baseQual() {return reads > 0 ? baseQual/reads : 0.0;} + double mapQual() {return reads > 0 ? mapQual/reads : 0.0;} + + /** + * Combines two metrics + * + * @param value the other metric to combine + * @return itself, for simple reduce + */ + public Metrics combine(Metrics value) { + this.gccontent += value.gccontent; + this.baseQual += value.baseQual; + this.mapQual += value.mapQual; + this.reads += value.reads; + this.refs += value.refs; + + return this; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java new file mode 100644 index 000000000..62716d6d2 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java @@ -0,0 +1,226 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.diagnostics.missing; + +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.CommandLineGATK; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.report.GATKReport; +import org.broadinstitute.sting.gatk.walkers.By; +import org.broadinstitute.sting.gatk.walkers.DataSource; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; +import org.broadinstitute.sting.gatk.walkers.NanoSchedulable; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; +import org.broadinstitute.sting.utils.help.HelpConstants; +import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; +import org.broadinstitute.sting.utils.text.XReadLines; + +import java.io.File; +import 
java.io.FileNotFoundException; +import java.io.PrintStream; +import java.util.List; + +/** + * Walks along reference and calculates a few metrics for each interval. + * + * Metrics: + *
    + *
  • Average Base Quality
  • + *
  • Average Mapping Quality
  • + *
  • GC Content
  • + *
  • Position in the target
  • + *
  • Coding Sequence / Intron
  • + *
  • Length of the uncovered area
  • + *
+ * + *

Input

+ *

+ * A reference file + *

+ * + *

Output

+ *

+ * GC content calculations per interval. + *

+ * + *

Example

+ *
+ * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ *   -T QualifyMissingIntervals \
+ *   -R ref.fasta \
+ *   -o output.grp \
+ *   -L input.intervals \
+ *   -cds cds.intervals \
+ *   -targets targets.intervals
+ * 
+ * + */ +@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) +@By(DataSource.REFERENCE) +public final class QualifyMissingIntervals extends LocusWalker implements NanoSchedulable { + @Output + protected PrintStream out; + + @Argument(shortName = "targets", required = true) + public File targetsFile; + + @Argument(shortName = "cds", required = false) + public File cdsFile; + + GATKReport simpleReport; + GenomeLocSortedSet target; + GenomeLocSortedSet cds; + + public boolean isReduceByInterval() { + return true; + } + + public void initialize() { + simpleReport = GATKReport.newSimpleReport("QualifyMissingIntervals", "IN", "GC", "BQ", "MQ", "TP", "CD", "LN"); + final GenomeLocParser parser = getToolkit().getGenomeLocParser(); + target = new GenomeLocSortedSet(parser); + cds = new GenomeLocSortedSet(parser); + parseFile(targetsFile, target, parser); + parseFile(cdsFile, cds, parser); + } + + public Metrics reduceInit() { + return new Metrics(); + } + + public Metrics map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + if (tracker == null) + return null; + + final Metrics metrics = new Metrics(); + final byte baseIndex = ref.getBase(); + final ReadBackedPileup pileup = context.getBasePileup(); + final int nBases = pileup.getNumberOfElements(); + + double baseQual = 0.0; + for (byte qual : pileup.getQuals()) { + baseQual += qual; + } + double mapQual = 0.0; + for (byte qual : pileup.getMappingQuals()) { + mapQual += qual; + } + + metrics.baseQual(baseQual); + metrics.mapQual(mapQual); + metrics.gccontent(baseIndex == 'C' || baseIndex == 'G' ? 
1.0 : 0.0); + metrics.reads(nBases); + metrics.refs(1); + + return metrics; + } + + @Override + public Metrics reduce(Metrics value, Metrics sum) { + return sum.combine(value); + } + + public void onTraversalDone(List> results) { + for (Pair r : results) { + GenomeLoc interval = r.getFirst(); + Metrics metrics = r.getSecond(); + simpleReport.addRow( + interval.toString(), + metrics.gccontent(), + metrics.baseQual(), + metrics.mapQual(), + getPositionInTarget(interval), + cds.overlaps(interval), + interval.size() + ); + } + simpleReport.print(out); + out.close(); + } + + private static GenomeLoc parseInterval(String s, GenomeLocParser parser) { + if (s.isEmpty()) { + return null; + } + String[] first = s.split(":"); + if (first.length == 2) { + String[] second = first[1].split("\\-"); + return parser.createGenomeLoc(first[0], Integer.decode(second[0]), Integer.decode(second[1])); + } else { + throw new UserException.BadInput("Interval doesn't parse correctly: " + s); + } + } + + private void parseFile(File file, GenomeLocSortedSet set, GenomeLocParser parser) { + try { + for (String s : new XReadLines(file) ) { + GenomeLoc interval = parseInterval(s, parser); + if (interval != null) + set.add(interval, true); + } + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + private int getPositionInTarget(GenomeLoc interval) { + final List hits = target.getOverlapping(interval); + int result = 0; + for (GenomeLoc hit : hits) { + result = interval.getStart() - hit.getStart(); // if there are multiple hits, we'll get the last one. + } + return result; + } +} From c84f0deb1d8e946f8fb25fa6f0390af5e0689fff Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Mon, 10 Jun 2013 13:42:00 -0400 Subject: [PATCH 39/99] Don't crash if cds file is not provided CDS file should be optional. 
--- .../walkers/diagnostics/missing/QualifyMissingIntervals.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java index 62716d6d2..cdcf32dcc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java @@ -117,7 +117,7 @@ public final class QualifyMissingIntervals extends LocusWalker public File targetsFile; @Argument(shortName = "cds", required = false) - public File cdsFile; + public File cdsFile = null; GATKReport simpleReport; GenomeLocSortedSet target; @@ -133,7 +133,8 @@ public final class QualifyMissingIntervals extends LocusWalker target = new GenomeLocSortedSet(parser); cds = new GenomeLocSortedSet(parser); parseFile(targetsFile, target, parser); - parseFile(cdsFile, cds, parser); + if (cdsFile != null) + parseFile(cdsFile, cds, parser); } public Metrics reduceInit() { From 1d67d07cf118ca74a8a8b7987c68c6a252b47c29 Mon Sep 17 00:00:00 2001 From: Mauricio Carneiro Date: Mon, 10 Jun 2013 15:17:40 -0400 Subject: [PATCH 40/99] better docs for Qualify Missing Intervals now that it's available to the public, better give'em good docs! 
--- .../walkers/diagnostics/missing/QualifyMissingIntervals.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java index cdcf32dcc..d0db3ef98 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java @@ -87,7 +87,7 @@ import java.util.List; * *

Input

*

- * A reference file + * A reference file (for GC content), the input bam file (for base and mapping quality calculation), the missing intervals (in the -L), the baits/targets used to sequence (in the -targets) and a bed file with the coding sequence intervals of the genome (in the -cds) *

* *

Output

@@ -100,6 +100,7 @@ import java.util.List; * java -Xmx2g -jar GenomeAnalysisTK.jar \ * -T QualifyMissingIntervals \ * -R ref.fasta \ + * -I input.bam \ * -o output.grp \ * -L input.intervals \ * -cds cds.intervals \ From 0d593cff70ece0fae75ed149f6819a5eeeaf9a2b Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 6 Jun 2013 14:32:47 -0400 Subject: [PATCH 41/99] Refactor rsID and overlap detection in VariantOverlapAnnotator utility class -- Variants will be considered matching if they have the same reference allele and at least 1 common alternative allele. This matching algorithm determines how rsID are added back into the VariantContext we want to annotate, and as well determining the overlap FLAG attribute field. -- Updated VariantAnnotator and VariantsToVCF to use this class, removing its old stale implementation -- Added unit tests for this VariantOverlapAnnotator class -- Removed GATKVCFUtils.rsIDOfFirstRealVariant as this is now better to use VariantOverlapAnnotator -- Now requires strict allele matching, without any option to just use site annotation. 
--- .../VariantOverlapAnnotatorUnitTest.java | 164 +++++++++++++ ...dGenotyperIndelCallingIntegrationTest.java | 12 +- ...GenotyperNormalCallingIntegrationTest.java | 2 +- .../walkers/annotator/VariantAnnotator.java | 4 - .../annotator/VariantAnnotatorEngine.java | 115 ++------- .../annotator/VariantOverlapAnnotator.java | 224 ++++++++++++++++++ .../walkers/variantutils/VariantsToVCF.java | 11 +- .../sting/utils/variant/GATKVCFUtils.java | 15 -- 8 files changed, 425 insertions(+), 122 deletions(-) create mode 100644 protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java create mode 100644 public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java new file mode 100644 index 000000000..6d6761f1c --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java @@ -0,0 +1,164 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. 
+* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. 
NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.variant.vcf.VCFConstants; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.*; + +public class VariantOverlapAnnotatorUnitTest extends BaseTest { + private GenomeLocParser genomeLocParser; + private IndexedFastaSequenceFile seq; + + @BeforeClass + public void setup() throws FileNotFoundException { + // sequence + seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + genomeLocParser = new GenomeLocParser(seq); + } + + private VariantContext makeVC(final String source, final String id, final List alleles) { + final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(source, "20", 10, alleles); + return new VariantContextBuilder(vc).id(id).make(); + } + + private VariantOverlapAnnotator makeAnnotator(final String dbSNP, final String ... overlaps) { + final RodBinding dbSNPBinding = dbSNP == null ? 
null : new RodBinding<>(VariantContext.class, dbSNP); + final Map, String> overlapBinding = new LinkedHashMap<>(); + for ( final String overlap : overlaps ) overlapBinding.put(new RodBinding<>(VariantContext.class, overlap), overlap); + return new VariantOverlapAnnotator(dbSNPBinding, overlapBinding, genomeLocParser); + } + + @Test + public void testCreateWithSpecialNames() { + final List names = Arrays.asList("X", "Y", "Z"); + final Map, String> overlapBinding = new LinkedHashMap<>(); + for ( final String overlap : names ) overlapBinding.put(new RodBinding<>(VariantContext.class, overlap + "Binding"), overlap); + final VariantOverlapAnnotator annotator = new VariantOverlapAnnotator(null, overlapBinding, genomeLocParser); + Assert.assertEquals(annotator.getOverlapNames(), names); + } + + @DataProvider(name = "AnnotateRsIDData") + public Object[][] makeAnnotateRsIDData() { + List tests = new ArrayList<>(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final VariantContext callNoIDAC = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "C")); + final VariantContext callNoIDAT = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "T")); + final VariantContext callIDAC = makeVC("call", "foo", Arrays.asList("A", "C")); + final VariantContext callExistingIDAC = makeVC("call", "rsID1", Arrays.asList("A", "C")); + + final VariantContext dbSNP_AC = makeVC("DBSNP", "rsID1", Arrays.asList("A", "C")); + final VariantContext dbSNP_AT = makeVC("DBSNP", "rsID2", Arrays.asList("A", "T")); + final VariantContext dbSNP_AG = makeVC("DBSNP", "rsID3", Arrays.asList("A", "G")); + final VariantContext dbSNP_AC_AT = makeVC("DBSNP", "rsID1;rsID2", Arrays.asList("A", "C", "T")); + final VariantContext dbSNP_AC_AG = makeVC("DBSNP", "rsID1;rsID3", Arrays.asList("A", "C", "G")); + + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC), dbSNP_AC.getID(), true}); + tests.add(new Object[]{callNoIDAC, 
Arrays.asList(dbSNP_AT), VCFConstants.EMPTY_ID_FIELD, false}); + tests.add(new Object[]{callIDAC, Arrays.asList(dbSNP_AC), "foo" + ";" + dbSNP_AC.getID(), true}); + tests.add(new Object[]{callIDAC, Arrays.asList(dbSNP_AT), "foo", false}); + tests.add(new Object[]{callExistingIDAC, Arrays.asList(dbSNP_AC), "rsID1", true}); + tests.add(new Object[]{callExistingIDAC, Arrays.asList(dbSNP_AT), "rsID1", false}); + + final VariantContext callNoIDACT = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "C", "T")); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC), dbSNP_AC.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AT), dbSNP_AT.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AG), VCFConstants.EMPTY_ID_FIELD, false}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC_AT), dbSNP_AC_AT.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC_AG), dbSNP_AC_AG.getID(), true}); + + // multiple options + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC, dbSNP_AT), "rsID1", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AT, dbSNP_AC), "rsID1", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_AT), "rsID1;rsID2", true}); + tests.add(new Object[]{callNoIDAT, Arrays.asList(dbSNP_AC_AT), "rsID1;rsID2", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_AG), "rsID1;rsID3", true}); + tests.add(new Object[]{callNoIDAT, Arrays.asList(dbSNP_AC_AG), VCFConstants.EMPTY_ID_FIELD, false}); + + final VariantContext dbSNP_AC_FAIL = new VariantContextBuilder(makeVC("DBSNP", "rsID1", Arrays.asList("A", "C"))).filter("FAIL").make(); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_FAIL), VCFConstants.EMPTY_ID_FIELD, false}); + + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AnnotateRsIDData") + public void testAnnotateRsID(final VariantContext toAnnotate, final List dbSNPRecords, final 
String expectedID, final boolean expectOverlap) throws Exception { + final VariantOverlapAnnotator annotator = makeAnnotator("dbnsp"); + final VariantContext annotated = annotator.annotateRsID(dbSNPRecords, toAnnotate); + Assert.assertNotNull(annotated); + Assert.assertEquals(annotated.getID(), expectedID); + } + + @Test(dataProvider = "AnnotateRsIDData") + public void testAnnotateOverlaps(final VariantContext toAnnotate, final List records, final String expectedID, final boolean expectOverlap) throws Exception { + final String name = "binding"; + final VariantOverlapAnnotator annotator = makeAnnotator(null, name); + final VariantContext annotated = annotator.annotateOverlap(records, name, toAnnotate); + Assert.assertNotNull(annotated); + Assert.assertEquals(annotated.getID(), toAnnotate.getID(), "Shouldn't modify annotation"); + Assert.assertEquals(annotated.hasAttribute(name), expectOverlap); + if ( expectOverlap ) { + Assert.assertEquals(annotated.getAttribute(name), true); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java index 856e97ebe..98a482c6f 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java @@ -73,7 +73,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("19f77f557150905ef3fa4713f611a1b9")); + Arrays.asList("14ad6eeed46e9b6f4757370267b1a1cc")); executeTest(String.format("test indel caller in SLX"), spec); } @@ -101,7 +101,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - 
Arrays.asList("bb3dbad9666ebf38d338f0c9c211a42e")); + Arrays.asList("cd184a2a5a1932dcf3e8f0424652176b")); executeTest(String.format("test indel calling, multiple technologies"), spec); } @@ -111,7 +111,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("8052390ca2b6a57c3ddf379a51225d64")); + Arrays.asList("e8d98996eb81ece8cfb52437920ae2e0")); executeTest("test MultiSample Pilot2 indels with alleles passed in", spec); } @@ -121,7 +121,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { baseCommandIndels + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("b6b9dba97fbabaeeb458a41051983e7b")); + Arrays.asList("23a78c16f64bffe1dea3a5587fcabdad")); executeTest("test MultiSample Pilot2 indels with alleles passed in and emitting all sites", spec); } @@ -136,7 +136,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L " + result.get(0).getAbsolutePath(), 1, - Arrays.asList("38730c7030271f5d0ca0b59365d57814")); + Arrays.asList("294183823d678d3668f4fa98b4de6e06")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } @@ -176,7 +176,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends 
WalkerTest { public void testMinIndelFraction0() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.0", 1, - Arrays.asList("264325878b988acc11d8e5d9d2ba0b7f")); + Arrays.asList("e90256acfc360fc4bf377094732a673a")); executeTest("test minIndelFraction 0.0", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index a52176a08..bf4316415 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -80,7 +80,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testWithAllelesPassedIn2() { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommand + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "allelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,025,000", 1, - Arrays.asList("698e54aeae3130779d246b9480a4052c")); + Arrays.asList("60115af273fde49c76d4df6c9c0f6501")); executeTest("test MultiSample Pilot2 with alleles passed in and emitting all sites", spec2); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java index f2bd6c14c..10ba4ca17 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java @@ -180,9 +180,6 @@ public class VariantAnnotator extends RodWalker implements Ann 
@Argument(fullName="MendelViolationGenotypeQualityThreshold",shortName="mvq",required=false,doc="The genotype quality threshold in order to annotate mendelian violation ratio") public double minGenotypeQualityP = 0.0; - @Argument(fullName="requireStrictAlleleMatch", shortName="strict", doc="If provided only comp tracks that exactly match both reference and alternate alleles will be counted as concordant", required=false) - protected boolean requireStrictAlleleMatch = false; - private VariantAnnotatorEngine engine; /** @@ -204,7 +201,6 @@ public class VariantAnnotator extends RodWalker implements Ann else engine = new VariantAnnotatorEngine(annotationGroupsToUse, annotationsToUse, annotationsToExclude, this, getToolkit()); engine.initializeExpressions(expressionsToUse); - engine.setRequireStrictAlleleMatch(requireStrictAlleleMatch); // setup the header fields // note that if any of the definitions conflict with our new ones, then we want to overwrite the old ones diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java index 695868bb1..90050a10a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java @@ -34,26 +34,23 @@ import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.*; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.variant.GATKVCFUtils; -import org.broadinstitute.variant.vcf.*; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; 
import org.broadinstitute.variant.variantcontext.*; +import org.broadinstitute.variant.vcf.*; import java.util.*; public class VariantAnnotatorEngine { - private List requestedInfoAnnotations = Collections.emptyList(); private List requestedGenotypeAnnotations = Collections.emptyList(); - private List requestedExpressions = new ArrayList(); + private List requestedExpressions = new ArrayList<>(); - private final HashMap, String> dbAnnotations = new HashMap, String>(); private final AnnotatorCompatible walker; private final GenomeAnalysisEngine toolkit; - private boolean requireStrictAlleleMatch = false; + VariantOverlapAnnotator variantOverlapAnnotator = null; protected static class VAExpression { @@ -85,7 +82,7 @@ public class VariantAnnotatorEngine { requestedInfoAnnotations = AnnotationInterfaceManager.createAllInfoFieldAnnotations(); requestedGenotypeAnnotations = AnnotationInterfaceManager.createAllGenotypeAnnotations(); excludeAnnotations(annotationsToExclude); - initializeDBs(); + initializeDBs(toolkit); } // use this constructor if you want to select specific annotations (and/or interfaces) @@ -93,14 +90,7 @@ public class VariantAnnotatorEngine { this.walker = walker; this.toolkit = toolkit; initializeAnnotations(annotationGroupsToUse, annotationsToUse, annotationsToExclude); - initializeDBs(); - } - - // experimental constructor for active region traversal - public VariantAnnotatorEngine(GenomeAnalysisEngine toolkit) { - this.walker = null; - this.toolkit = toolkit; - requestedInfoAnnotations = AnnotationInterfaceManager.createInfoFieldAnnotations(Arrays.asList("ActiveRegionBasedAnnotation"), Collections.emptyList()); + initializeDBs(toolkit); } // select specific expressions to use @@ -138,16 +128,19 @@ public class VariantAnnotatorEngine { requestedGenotypeAnnotations = tempRequestedGenotypeAnnotations; } - private void initializeDBs() { - + private void initializeDBs(final GenomeAnalysisEngine engine) { // check to see whether comp rods were included - 
final RodBinding dbsnp = walker.getDbsnpRodBinding(); - if ( dbsnp != null && dbsnp.isBound() ) - dbAnnotations.put(dbsnp, VCFConstants.DBSNP_KEY); + RodBinding dbSNPBinding = walker.getDbsnpRodBinding(); + if ( dbSNPBinding != null && ! dbSNPBinding.isBound() ) + dbSNPBinding = null; - final List> comps = walker.getCompRodBindings(); - for ( RodBinding rod : comps ) - dbAnnotations.put(rod, rod.getName()); + final Map, String> overlapBindings = new LinkedHashMap<>(); + for ( final RodBinding b : walker.getCompRodBindings()) + if ( b.isBound() ) overlapBindings.put(b, b.getName()); + if ( dbSNPBinding != null && ! overlapBindings.keySet().contains(VCFConstants.DBSNP_KEY) ) + overlapBindings.put(dbSNPBinding, VCFConstants.DBSNP_KEY); // add overlap detection with DBSNP by default + + variantOverlapAnnotator = new VariantOverlapAnnotator(dbSNPBinding, overlapBindings, engine.getGenomeLocParser()); } public void invokeAnnotationInitializationMethods( Set headerLines ) { @@ -161,14 +154,13 @@ public class VariantAnnotatorEngine { } public Set getVCFAnnotationDescriptions() { - Set descriptions = new HashSet(); for ( InfoFieldAnnotation annotation : requestedInfoAnnotations ) descriptions.addAll(annotation.getDescriptions()); for ( GenotypeAnnotation annotation : requestedGenotypeAnnotations ) descriptions.addAll(annotation.getDescriptions()); - for ( String db : dbAnnotations.values() ) { + for ( String db : variantOverlapAnnotator.getOverlapNames() ) { if ( VCFStandardHeaderLines.getInfoLine(db, false) != null ) descriptions.add(VCFStandardHeaderLines.getInfoLine(db)); else @@ -178,10 +170,6 @@ public class VariantAnnotatorEngine { return descriptions; } - public void setRequireStrictAlleleMatch( final boolean requireStrictAlleleMatch ) { - this.requireStrictAlleleMatch = requireStrictAlleleMatch; - } - public VariantContext annotateContext(final RefMetaDataTracker tracker, final ReferenceContext ref, final Map stratifiedContexts, @@ -192,13 +180,10 @@ public class 
VariantAnnotatorEngine { public VariantContext annotateContext(final RefMetaDataTracker tracker, final ReferenceContext ref, final Map stratifiedContexts, - VariantContext vc, + final VariantContext vc, final Map perReadAlleleLikelihoodMap) { Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); - // annotate db occurrences - vc = annotateDBs(tracker, ref.getLocus(), vc, infoAnnotations); - // annotate expressions where available annotateExpressions(tracker, ref.getLocus(), infoAnnotations); @@ -213,7 +198,10 @@ public class VariantAnnotatorEngine { VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); // annotate genotypes, creating another new VC in the process - return builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make(); + final VariantContext annotated = builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make(); + + // annotate db occurrences + return annotateDBs(tracker, annotated); } public VariantContext annotateContext(final Map perReadAlleleLikelihoodMap, VariantContext vc) { @@ -241,66 +229,13 @@ public class VariantAnnotatorEngine { * Annotate the ID field and other DBs for the given Variant Context * * @param tracker ref meta data tracker (cannot be null) - * @param loc location of the vc * @param vc variant context to annotate * @return non-null annotated version of vc */ - @Requires({"tracker != null && loc != null && vc != null"}) - @Ensures("result != null") - public VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc) { - final Map newInfoAnnotations = new HashMap(0); - vc = annotateDBs(tracker, loc, vc, newInfoAnnotations); - - if ( !newInfoAnnotations.isEmpty() ) { - final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(newInfoAnnotations); - vc = builder.make(); - } - - return vc; - } - - /** - * Annotate the ID field and 
other DBs for the given Variant Context - * - * @param tracker ref meta data tracker (cannot be null) - * @param loc location of the vc - * @param vc variant context to annotate - * @param infoAnnotations info annotation map to populate - * @return non-null annotated version of vc - */ @Requires({"tracker != null && loc != null && vc != null && infoAnnotations != null"}) @Ensures("result != null") - private VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc, final Map infoAnnotations) { - for ( Map.Entry, String> dbSet : dbAnnotations.entrySet() ) { - if ( dbSet.getValue().equals(VCFConstants.DBSNP_KEY) ) { - final String rsID = GATKVCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), loc), vc.getType()); - - // add the ID if appropriate - if ( rsID != null ) { - // put the DB key into the INFO field - infoAnnotations.put(VCFConstants.DBSNP_KEY, true); - - if ( vc.emptyID() ) { - vc = new VariantContextBuilder(vc).id(rsID).make(); - } else if ( walker.alwaysAppendDbsnpId() && vc.getID().indexOf(rsID) == -1 ) { - final String newRsID = vc.getID() + VCFConstants.ID_FIELD_SEPARATOR + rsID; - vc = new VariantContextBuilder(vc).id(newRsID).make(); - } - } - } else { - boolean overlapsComp = false; - for ( VariantContext comp : tracker.getValues(dbSet.getKey(), loc) ) { - if ( !comp.isFiltered() && ( !requireStrictAlleleMatch || comp.getAlleles().equals(vc.getAlleles()) ) ) { - overlapsComp = true; - break; - } - } - if ( overlapsComp ) - infoAnnotations.put(dbSet.getValue(), overlapsComp); - } - } - - return vc; + private VariantContext annotateDBs(final RefMetaDataTracker tracker, VariantContext vc) { + return variantOverlapAnnotator.annotateOverlaps(tracker, variantOverlapAnnotator.annotateRsID(tracker, vc)); } private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final Map infoAnnotations) { diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java new file mode 100644 index 000000000..0efabba3c --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java @@ -0,0 +1,224 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.variant.vcf.VCFConstants; + +import java.util.*; + +/** + * Annotate the ID field and attribute overlap FLAGs for a VariantContext against a RefMetaDataTracker or a list + * of VariantContexts + */ +public final class VariantOverlapAnnotator { + final RodBinding dbSNPBinding; + final Map, String> overlapBindings; + final GenomeLocParser genomeLocParser; + + /** + * Create a new VariantOverlapAnnotator without overall bindings + * + * @see #VariantOverlapAnnotator(org.broadinstitute.sting.commandline.RodBinding, java.util.Map, org.broadinstitute.sting.utils.GenomeLocParser) + */ + public VariantOverlapAnnotator(RodBinding dbSNPBinding, GenomeLocParser genomeLocParser) { + this(dbSNPBinding, Collections., String>emptyMap(), genomeLocParser); + } + + /** + * Create a new VariantOverlapAnnotator + * + * @param dbSNPBinding the RodBinding to use for updating ID field values, or null if that behavior isn't desired + * @param overlapBindings a map of RodBindings / name to use for overlap annotation. Each binding will be used to + * add name => true for variants that overlap with variants found to a + * RefMetaDataTracker at each location. 
Can be empty but not null + * @param genomeLocParser the genome loc parser we'll use to create GenomeLocs for VariantContexts + */ + public VariantOverlapAnnotator(RodBinding dbSNPBinding, Map, String> overlapBindings, GenomeLocParser genomeLocParser) { + if ( overlapBindings == null ) throw new IllegalArgumentException("overlapBindings cannot be null"); + if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null"); + + this.dbSNPBinding = dbSNPBinding; + this.overlapBindings = overlapBindings; + this.genomeLocParser = genomeLocParser; + } + + /** + * Update rsID in vcToAnnotate with rsIDs from dbSNPBinding fetched from tracker + * @see #annotateOverlap(java.util.List, String, org.broadinstitute.variant.variantcontext.VariantContext) + * + * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate + * for VariantContexts bound to dbSNPBinding that start at vcToAnnotate + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated rsID value + */ + public VariantContext annotateRsID(final RefMetaDataTracker tracker, final VariantContext vcToAnnotate) { + if ( dbSNPBinding != null ) { + final GenomeLoc loc = getLoc(vcToAnnotate); + return annotateRsID(tracker.getValues(dbSNPBinding, loc), vcToAnnotate); + } else { + return vcToAnnotate; + } + } + + /** + * Update rsID of vcToAnnotate with rsID match found in vcsAtLoc, if one exists + * + * @param vcsAtLoc a list of variant contexts starting at this location to use as sources for rsID values + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated rsID value + */ + public VariantContext annotateRsID(final List vcsAtLoc, final VariantContext vcToAnnotate ) { + final String rsID = getRsID(vcsAtLoc, vcToAnnotate); + + // add the ID if appropriate + if ( rsID != null ) { + final VariantContextBuilder vcb = new 
VariantContextBuilder(vcToAnnotate); + + if ( ! vcToAnnotate.hasID() ) { + return vcb.id(rsID).make(); + } else if ( ! vcToAnnotate.getID().contains(rsID) ) { + return vcb.id(vcToAnnotate.getID() + VCFConstants.ID_FIELD_SEPARATOR + rsID).make(); + } // falling through to return VC lower down + } + + // nothing to do, just return vc + return vcToAnnotate; + } + + private GenomeLoc getLoc(final VariantContext vc) { + return genomeLocParser.createGenomeLoc(vc); + } + + /** + * Add overlap attributes to vcToAnnotate against all overlapBindings in tracker + * + * @see #annotateOverlap(java.util.List, , String, org.broadinstitute.variant.variantcontext.VariantContext) + * for more information + * + * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate + * for VariantContexts bound to dbSNPBinding that start at vcToAnnotate + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated overlaps update fields value + */ + public VariantContext annotateOverlaps(final RefMetaDataTracker tracker, VariantContext vcToAnnotate) { + if ( overlapBindings.isEmpty() ) return vcToAnnotate; + + VariantContext annotated = vcToAnnotate; + final GenomeLoc loc = getLoc(vcToAnnotate); + for ( Map.Entry, String> overlapBinding : overlapBindings.entrySet() ) { + annotated = annotateOverlap(tracker.getValues(overlapBinding.getKey(), loc), overlapBinding.getValue(), vcToAnnotate); + } + + return annotated; + } + + /** + * Add overlaps flag attributes to vcToAnnotate binding overlapTestVCs.getSource() => true if + * an overlapping variant context can be found in overlapTestVCs with vcToAnnotate + * + * Overlaps here means that the reference alleles are the same and at least one alt + * allele in vcToAnnotate is equals to one of the alt alleles in overlapTestVCs + * + * @param overlapTestVCs a non-null list of potential overlaps that start at vcToAnnotate + * @param attributeKey the key to set to true 
in the attribute map for vcToAnnotate if it overlaps + * @param vcToAnnotate a non-null VariantContext to annotate + * @return + */ + public VariantContext annotateOverlap(final List overlapTestVCs, final String attributeKey, VariantContext vcToAnnotate) { + if ( overlapBindings.isEmpty() ) return vcToAnnotate; + + final boolean overlaps = overlaps(overlapTestVCs, vcToAnnotate); + if ( overlaps ) { + return new VariantContextBuilder(vcToAnnotate).attribute(attributeKey, true).make(); + } else { + return vcToAnnotate; + } + } + + /** + * Returns the ID field of the first VariantContext in rsIDSourceVCs that has the same reference allele + * as vcToAnnotate and all of the alternative alleles in vcToAnnotate. + * + * Doesn't require vcToAnnotate to be a complete match, so + * + * A/C/G in VC in rsIDSourceVCs + * + * would match the a VC with A/C but not A/T. Also we don't require all alleles to match + * so we would also match A/C/T to A/C/G. + * + * Will only match rsIDSourceVCs that aren't failing filters. + * + * @param rsIDSourceVCs a non-null list of potential overlaps that start at vcToAnnotate + * @param vcToAnnotate a non-null VariantContext to annotate + * @return a String to use for the rsID from rsIDSourceVCs if one matches, or null if none matches + */ + private String getRsID(final List rsIDSourceVCs, final VariantContext vcToAnnotate) { + if ( rsIDSourceVCs == null ) throw new IllegalArgumentException("rsIDSourceVCs cannot be null"); + if ( vcToAnnotate == null ) throw new IllegalArgumentException("vcToAnnotate cannot be null"); + + for ( VariantContext vcComp : rsIDSourceVCs ) { + if ( vcComp.isFiltered() ) continue; // don't process any failed VCs + + if ( ! 
vcComp.getChr().equals(vcToAnnotate.getChr()) || vcComp.getStart() != vcToAnnotate.getStart() ) + throw new IllegalArgumentException("source rsID VariantContext " + vcComp + " doesn't start at the same position as vcToAnnotate " + vcToAnnotate); + + if ( vcToAnnotate.getReference().equals(vcComp.getReference()) ) { + for ( final Allele allele : vcToAnnotate.getAlternateAlleles() ) { + if ( vcComp.getAlternateAlleles().contains(allele) ) + return vcComp.getID(); + } + } + } + + return null; + } + + /** + * Does vcToAnnotate overlap with any of the records in potentialOverlaps? + * + * @param potentialOverlaps a non-null list of potential overlaps that start at vcToAnnotate + * @param vcToAnnotate a non-null VariantContext to annotate + * @return true if vcToAnnotate overlaps (position and all alt alleles) with some variant in potentialOverlaps + */ + private boolean overlaps(final List potentialOverlaps, final VariantContext vcToAnnotate) { + return getRsID(potentialOverlaps, vcToAnnotate) != null; + } + + /** + * Get the collection of the RodBinding names for those being used for overlap detection + * @return a non-null collection of Strings + */ + public Collection getOverlapNames() { + return overlapBindings.values(); + } +} diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java index 60809134a..dbb68961f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java @@ -39,6 +39,7 @@ import org.broadinstitute.sting.gatk.refdata.utils.GATKFeature; import org.broadinstitute.sting.gatk.walkers.Reference; import org.broadinstitute.sting.gatk.walkers.RodWalker; import org.broadinstitute.sting.gatk.walkers.Window; +import org.broadinstitute.sting.gatk.walkers.annotator.VariantOverlapAnnotator; import 
org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.SampleUtils; @@ -112,24 +113,21 @@ public class VariantsToVCF extends RodWalker { // for dealing with indels in hapmap CloseableIterator dbsnpIterator = null; + VariantOverlapAnnotator variantOverlapAnnotator = null; public void initialize() { vcfwriter = VariantContextWriterFactory.sortOnTheFly(baseWriter, 40, false); + variantOverlapAnnotator = new VariantOverlapAnnotator(dbsnp.dbsnp, getToolkit().getGenomeLocParser()); } public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { if ( tracker == null || !BaseUtils.isRegularBase(ref.getBase()) ) return 0; - String rsID = dbsnp == null ? null : GATKVCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbsnp.dbsnp, context.getLocation()), VariantContext.Type.SNP); - Collection contexts = getVariantContexts(tracker, ref); for ( VariantContext vc : contexts ) { VariantContextBuilder builder = new VariantContextBuilder(vc); - if ( rsID != null && vc.emptyID() ) { - builder.id(rsID).make(); - } // set the appropriate sample name if necessary if ( sampleName != null && vc.hasGenotypes() && vc.hasGenotype(variants.getName()) ) { @@ -137,7 +135,8 @@ public class VariantsToVCF extends RodWalker { builder.genotypes(g); } - writeRecord(builder.make(), tracker, ref.getLocus()); + final VariantContext withID = variantOverlapAnnotator.annotateRsID(tracker, builder.make()); + writeRecord(withID, tracker, ref.getLocus()); } return 1; diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java index 0fba432e7..aa2e92559 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java @@ -149,21 +149,6 @@ public class GATKVCFUtils { return VCFUtils.withUpdatedContigs(header, 
engine.getArguments().referenceFile, engine.getMasterSequenceDictionary()); } - public static String rsIDOfFirstRealVariant(List VCs, VariantContext.Type type) { - if ( VCs == null ) - return null; - - String rsID = null; - for ( VariantContext vc : VCs ) { - if ( vc.getType() == type ) { - rsID = vc.getID(); - break; - } - } - - return rsID; - } - /** * Utility class to read all of the VC records from a file * From 1c03ebc82d3b67a78d947aec39ffc28a552244b3 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 6 Jun 2013 15:38:06 -0400 Subject: [PATCH 42/99] Implement ActiveRegionTraversal RefMetaDataTracker for map call; HaplotypeCaller now annotates ID from dbSNP -- Reuse infrastructure for RODs for reads to implement general IntervalReferenceOrderedView so that both TraverseReads and TraverseActiveRegions can use the same underlying infrastructure -- TraverseActiveRegions now provides a meaningful RefMetaDataTracker to ActiveRegionWalker.map -- Cleanup misc. code as it came up -- Resolves GSA-808: Write general utility code to do rsID allele matching, hook up to UG and HC --- .../haplotypecaller/GenotypingEngine.java | 4 +- .../haplotypecaller/HaplotypeCaller.java | 10 +- .../HaplotypeCallerIntegrationTest.java | 27 ++- .../IntervalOverlappingRODsFromStream.java | 8 +- .../IntervalReferenceOrderedView.java | 184 ++++++++++++++++++ .../ManagingReferenceOrderedView.java | 3 +- .../ReadBasedReferenceOrderedView.java | 104 +--------- .../providers/ReferenceOrderedView.java | 3 +- .../datasources/providers/RodLocusView.java | 3 +- .../gatk/executive/LinearMicroScheduler.java | 7 - .../traversals/TraverseActiveRegions.java | 157 +++++++++------ .../gatk/traversals/TraverseLociNano.java | 2 +- .../annotator/VariantAnnotatorEngine.java | 13 +- .../annotator/VariantOverlapAnnotator.java | 8 +- .../sting/gatk/ReadMetricsUnitTest.java | 1 - ...IntervalReferenceOrderedViewUnitTest.java} | 4 +- .../ReferenceOrderedViewUnitTest.java | 6 +- 
.../TraverseActiveRegionsUnitTest.java | 2 - .../traversals/TraverseReadsUnitTest.java | 4 +- 19 files changed, 352 insertions(+), 198 deletions(-) create mode 100644 public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java rename public/java/test/org/broadinstitute/sting/gatk/datasources/providers/{ReadBasedReferenceOrderedViewUnitTest.java => IntervalReferenceOrderedViewUnitTest.java} (98%) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index cbcba28fd..04173b64f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -49,6 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine; import org.broadinstitute.sting.gatk.walkers.genotyper.GenotypeLikelihoodsCalculationModel; import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; @@ -146,6 +147,7 @@ public class GenotypingEngine { final GenomeLoc refLoc, final GenomeLoc activeRegionWindow, final GenomeLocParser genomeLocParser, + final RefMetaDataTracker tracker, final List activeAllelesToGenotype ) { // sanity check input arguments if (UG_engine == null) throw new IllegalArgumentException("UG_Engine input can't be null, got "+UG_engine); @@ -204,7 +206,7 @@ public class GenotypingEngine { convertHaplotypeReadMapToAlleleReadMap( haplotypeReadMap, alleleMapper, 0.0 ) ); final Map stratifiedReadMap = filterToOnlyOverlappingReads( genomeLocParser, alleleReadMap_annotations, 
perSampleFilteredReadList, call ); - VariantContext annotatedCall = annotationEngine.annotateContext(stratifiedReadMap, call); + VariantContext annotatedCall = annotationEngine.annotateContextForActiveRegion(tracker, stratifiedReadMap, call); if( call.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! annotatedCall = GATKVariantContextUtils.reverseTrimAlleles(annotatedCall); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 182e59493..e55413649 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -441,7 +441,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In private final static int MIN_READ_LENGTH = 10; private List samplesList = new ArrayList(); - private final List allelesToGenotype = new ArrayList(); private final static Allele FAKE_REF_ALLELE = Allele.create("N", true); // used in isActive function to call into UG Engine. Should never appear anywhere in a VCF file private final static Allele FAKE_ALT_ALLELE = Allele.create("", false); // used in isActive function to call into UG Engine. Should never appear anywhere in a VCF file @@ -596,7 +595,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { final VariantContext vcFromAllelesRod = UnifiedGenotyperEngine.getVCFromAllelesRod(tracker, ref, ref.getLocus(), false, logger, UG_engine.getUAC().alleles); if( vcFromAllelesRod != null ) { - allelesToGenotype.add(vcFromAllelesRod); // save for later for processing during the ActiveRegion's map call. 
Should be folded into a RefMetaDataTracker object return new ActivityProfileState(ref.getLocus(), 1.0); } } @@ -664,12 +662,11 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final List activeAllelesToGenotype = new ArrayList<>(); if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { - for( final VariantContext vc : allelesToGenotype ) { - if( originalActiveRegion.getLocation().overlapsP( getToolkit().getGenomeLocParser().createGenomeLoc(vc) ) ) { + for ( final VariantContext vc : metaDataTracker.getValues(UG_engine.getUAC().alleles) ) { + if ( vc.isNotFiltered() ) { activeAllelesToGenotype.add(vc); // do something with these VCs during GGA mode } } - allelesToGenotype.removeAll( activeAllelesToGenotype ); // No alleles found in this region so nothing to do! if ( activeAllelesToGenotype.isEmpty() ) { return NO_CALLS; } } else { @@ -704,6 +701,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In assemblyResult.paddedReferenceLoc, assemblyResult.regionForGenotyping.getLocation(), getToolkit().getGenomeLocParser(), + metaDataTracker, activeAllelesToGenotype ); // TODO -- must disable if we are doing NCT, or set the output type of ! presorted @@ -890,8 +888,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Override public Integer reduce(List callsInRegion, Integer numCalledRegions) { for( final VariantContext call : callsInRegion ) { - // TODO -- uncomment this line once ART-based walkers have a proper RefMetaDataTracker. - // annotationEngine.annotateDBs(metaDataTracker, getToolkit().getGenomeLocParser().createGenomeLoc(call), call); vcfWriter.add( call ); } return (callsInRegion.isEmpty() ? 
0 : 1) + numCalledRegions; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 77be9fba2..904f15728 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -47,15 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import net.sf.picard.reference.IndexedFastaSequenceFile; -import org.broad.tribble.TribbleIndexedFeatureReader; import org.broadinstitute.sting.WalkerTest; -import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.variant.GATKVCFUtils; import org.broadinstitute.variant.variantcontext.VariantContext; -import org.broadinstitute.variant.vcf.VCFCodec; import org.testng.annotations.Test; import java.io.File; @@ -69,6 +66,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { final static String NA12878_CHR20_BAM = validationDataLocation + "NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam"; final static String CEUTRIO_BAM = validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam"; final static String NA12878_RECALIBRATED_BAM = privateTestDir + "NA12878.100kb.BQSRv2.example.bam"; + final static String NA12878_PCRFREE = privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam"; final static String CEUTRIO_MT_TEST_BAM = privateTestDir + "CEUTrio.HiSeq.b37.MT.1_50.bam"; final static String INTERVALS_FILE = validationDataLocation + "NA12878.HiSeq.b37.chr20.10_11mb.test.intervals"; @@ -199,4 +197,27 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest 
{ Arrays.asList("86bdd07a3ac4f6ce239c30efea8bf5ba")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } + + // -------------------------------------------------------------------------------------------------------------- + // + // test dbSNP annotation + // + // -------------------------------------------------------------------------------------------------------------- + + @Test + public void HCTestDBSNPAnnotationWGS() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, + Arrays.asList("7b23a288a31cafca3946f14f2381e7cb")); + executeTest("HC calling with dbSNP ID annotation on WGS intervals", spec); + } + + @Test + public void HCTestDBSNPAnnotationWEx() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-11,000,000 -D " + b37dbSNP132 + + " -L " + hg19Intervals + " -isr INTERSECTION", 1, + Arrays.asList("9587029b702bb59bd4dfec69eac4c210")); + executeTest("HC calling with dbSNP ID annotation on WEx intervals", spec); + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java index fe3a0c6ce..3aff745fa 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java @@ -72,8 +72,6 @@ class IntervalOverlappingRODsFromStream { /** * Get the list of RODs overlapping loc from this stream of RODs. 
* - * Sequential calls to this function must obey the rule that loc2.getStart >= loc1.getStart - * * @param loc the interval to query * @return a non-null RODRecordList containing the overlapping RODs, which may be empty */ @@ -84,7 +82,6 @@ class IntervalOverlappingRODsFromStream { if ( lastQuery != null && loc.getStart() < lastQuery.getStart() ) throw new IllegalArgumentException(String.format("BUG: query interval (%s) starts before the previous interval %s", loc, lastQuery)); - trimCurrentFeaturesToLoc(loc); readOverlappingFutureFeatures(loc); return new RODRecordListImpl(name, subsetToOverlapping(loc, currentFeatures), loc); } @@ -128,11 +125,14 @@ class IntervalOverlappingRODsFromStream { /** * Update function. Remove all elements of currentFeatures that end before loc * + * Must be called by clients periodically when they know they they will never ask for data before + * loc, so that the running cache of RODs doesn't grow out of control. + * * @param loc the location to use */ @Requires("loc != null") @Ensures("currentFeatures.size() <= old(currentFeatures.size())") - private void trimCurrentFeaturesToLoc(final GenomeLoc loc) { + public void trimCurrentFeaturesToLoc(final GenomeLoc loc) { final ListIterator it = currentFeatures.listIterator(); while ( it.hasNext() ) { final GATKFeature feature = it.next(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java new file mode 100644 index 000000000..5e884ce53 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java @@ -0,0 +1,184 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, 
including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.datasources.providers; + +import net.sf.picard.util.PeekableIterator; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.datasources.reads.ReadShard; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.refdata.utils.LocationAwareSeekableRODIterator; +import org.broadinstitute.sting.gatk.refdata.utils.RODRecordList; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * a ROD view that allows for requests for RODs that overlap intervals on the genome to produce a RefMetaDataTracker + */ +public class IntervalReferenceOrderedView implements ReferenceOrderedView { + /** a list of the RMDDataState (location->iterators) */ + private final List states = new ArrayList<>(1); + + /** + * Used to get genome locs for reads + */ + protected final 
GenomeLocParser genomeLocParser; + + /** + * The total extent of all reads in this span. We create iterators from our RODs + * from the start of this span, to the end. + */ + private final GenomeLoc shardSpan; + + /** + * Create a new IntervalReferenceOrderedView taking data from provider and capable of + * servicing ROD overlap requests within the genomic interval span + * + * @param provider a ShardDataProvider to give us data + * @param span a GenomeLoc span, or null indicating take the entire genome + */ + public IntervalReferenceOrderedView(final ShardDataProvider provider, final GenomeLoc span) { + if ( provider == null ) throw new IllegalArgumentException("provider cannot be null"); + if ( provider.hasReferenceOrderedData() && span == null ) throw new IllegalArgumentException("span cannot be null when provider has reference ordered data"); + + this.genomeLocParser = provider.getGenomeLocParser(); + this.shardSpan = span; + provider.register(this); + + // conditional to optimize the case where we don't have any ROD data + if ( provider.hasReferenceOrderedData() && ! 
shardSpan.isUnmapped() ) { + for (final ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData()) + states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan))); + } + } + + /** + * Testing constructor + */ + protected IntervalReferenceOrderedView(final GenomeLocParser genomeLocParser, + final GenomeLoc shardSpan, + final List names, + final List> featureSources) { + this.genomeLocParser = genomeLocParser; + this.shardSpan = shardSpan; + for ( int i = 0; i < names.size(); i++ ) + states.add(new RMDDataState(names.get(i), featureSources.get(i))); + } + + public Collection> getConflictingViews() { + List> classes = new ArrayList<>(); + classes.add(ManagingReferenceOrderedView.class); + return classes; + } + + /** + * Get a RefMetaDataTracker containing bindings for all RODs overlapping the start position of loc + * @param loc a GenomeLoc of size == 1 + * @return a non-null RefMetaDataTracker + */ + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus(GenomeLoc loc) { + if ( loc == null ) throw new IllegalArgumentException("loc cannot be null"); + if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc); + return getReferenceOrderedDataForInterval(loc); + } + + /** + * Get a RefMetaDataTracker containing bindings for all RODs overlapping interval + * + * @param interval a non=null interval + * @return a non-null RefMetaDataTracker + */ + public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) { + if ( interval == null ) throw new IllegalArgumentException("Interval cannot be null"); + + if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers) + return RefMetaDataTracker.EMPTY_TRACKER; + else { + final List bindings = new ArrayList<>(states.size()); + for ( final RMDDataState state : states ) + bindings.add(state.stream.getOverlapping(interval)); + return new RefMetaDataTracker(bindings); + } + } + + 
/** + * Trim down all of the ROD managers so that they only hold ROD bindings wit start >= startOfDataToKeep.getStart() + * + * @param startOfDataToKeep a non-null genome loc + */ + public void trimCurrentFeaturesToLoc(final GenomeLoc startOfDataToKeep) { + if ( startOfDataToKeep == null ) throw new IllegalArgumentException("startOfDataToKeep cannot be null"); + + for ( final RMDDataState state : states ) + state.stream.trimCurrentFeaturesToLoc(startOfDataToKeep); + } + + /** + * Closes the current view. + */ + public void close() { + for (final RMDDataState state : states) + state.close(); + + // Clear out the existing data so that post-close() accesses to this data will fail-fast. + states.clear(); + } + + /** + * Models the traversal state of a given ROD lane. + */ + private static class RMDDataState { + public final ReferenceOrderedDataSource dataSource; + public final IntervalOverlappingRODsFromStream stream; + private final LocationAwareSeekableRODIterator iterator; + + public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { + this.dataSource = dataSource; + this.iterator = iterator; + this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator<>(iterator)); + } + + /** + * For testing + */ + public RMDDataState(final String name, final PeekableIterator iterator) { + this.dataSource = null; + this.iterator = null; + this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator<>(iterator)); + } + + public void close() { + if ( dataSource != null ) + dataSource.close( iterator ); + } + } +} + diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java index 09b72f5eb..50f2369cb 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java +++ 
b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java @@ -76,7 +76,8 @@ public class ManagingReferenceOrderedView implements ReferenceOrderedView { * @param loc Locus at which to track. * @return A tracker containing information about this locus. */ - public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) { if ( states.isEmpty() ) return RefMetaDataTracker.EMPTY_TRACKER; else { diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java index 52f490972..84e27c953 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java @@ -42,52 +42,9 @@ import java.util.Collection; import java.util.List; /** a ROD view for reads. This provides the Read traversals a way of getting a RefMetaDataTracker */ -public class ReadBasedReferenceOrderedView implements View { - // a list of the RMDDataState (location->iterators) - private final List states = new ArrayList(1); - private final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker(); - - /** - * Used to get genome locs for reads - */ - private final GenomeLocParser genomeLocParser; - - /** - * The total extent of all reads in this span. We create iterators from our RODs - * from the start of this span, to the end. 
- */ - private final GenomeLoc shardSpan; - +public class ReadBasedReferenceOrderedView extends IntervalReferenceOrderedView { public ReadBasedReferenceOrderedView(final ShardDataProvider provider) { - this.genomeLocParser = provider.getGenomeLocParser(); - // conditional to optimize the case where we don't have any ROD data - this.shardSpan = provider.getReferenceOrderedData() != null ? ((ReadShard)provider.getShard()).getReadsSpan() : null; - provider.register(this); - - if ( provider.getReferenceOrderedData() != null && ! shardSpan.isUnmapped() ) { - for (ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData()) - states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan))); - } - } - - - /** - * Testing constructor - */ - protected ReadBasedReferenceOrderedView(final GenomeLocParser genomeLocParser, - final GenomeLoc shardSpan, - final List names, - final List> featureSources) { - this.genomeLocParser = genomeLocParser; - this.shardSpan = shardSpan; - for ( int i = 0; i < names.size(); i++ ) - states.add(new RMDDataState(names.get(i), featureSources.get(i))); - } - - public Collection> getConflictingViews() { - List> classes = new ArrayList>(); - classes.add(ManagingReferenceOrderedView.class); - return classes; + super(provider, provider.hasReferenceOrderedData() ? 
((ReadShard)provider.getShard()).getReadsSpan() : null); } /** @@ -101,60 +58,11 @@ public class ReadBasedReferenceOrderedView implements View { @Ensures("result != null") public RefMetaDataTracker getReferenceOrderedDataForRead(final SAMRecord rec) { if ( rec.getReadUnmappedFlag() ) - // empty RODs for unmapped reads - return new RefMetaDataTracker(); - else - return getReferenceOrderedDataForInterval(genomeLocParser.createGenomeLoc(rec)); - } - - @Requires({"interval != null", "shardSpan == null || shardSpan.isUnmapped() || shardSpan.containsP(interval)"}) - @Ensures("result != null") - public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) { - if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers) - return EMPTY_TRACKER; + return RefMetaDataTracker.EMPTY_TRACKER; else { - final List bindings = new ArrayList(states.size()); - for ( final RMDDataState state : states ) - bindings.add(state.stream.getOverlapping(interval)); - return new RefMetaDataTracker(bindings); - } - } - - /** - * Closes the current view. - */ - public void close() { - for (final RMDDataState state : states) - state.close(); - - // Clear out the existing data so that post-close() accesses to this data will fail-fast. - states.clear(); - } - - /** Models the traversal state of a given ROD lane. 
*/ - private static class RMDDataState { - public final ReferenceOrderedDataSource dataSource; - public final IntervalOverlappingRODsFromStream stream; - private final LocationAwareSeekableRODIterator iterator; - - public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { - this.dataSource = dataSource; - this.iterator = iterator; - this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator(iterator)); - } - - /** - * For testing - */ - public RMDDataState(final String name, final PeekableIterator iterator) { - this.dataSource = null; - this.iterator = null; - this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator(iterator)); - } - - public void close() { - if ( dataSource != null ) - dataSource.close( iterator ); + final GenomeLoc readSpan = genomeLocParser.createGenomeLoc(rec); + trimCurrentFeaturesToLoc(readSpan); + return getReferenceOrderedDataForInterval(readSpan); } } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java index fa83dff82..85c20a6c3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java @@ -25,10 +25,9 @@ package org.broadinstitute.sting.gatk.datasources.providers; -import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.utils.GenomeLoc; public interface ReferenceOrderedView extends View { - RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext refContext ); + RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ); } diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java index 3fb4c7352..1b6c14628 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java @@ -98,7 +98,8 @@ public class RodLocusView extends LocusView implements ReferenceOrderedView { rodQueue = new RODMergingIterator(iterators); } - public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) { // special case the interval again -- add it into the ROD if ( interval != null ) { allTracksHere.add(interval); } return new RefMetaDataTracker(allTracksHere); diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java index 415049228..dc46849df 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java @@ -37,7 +37,6 @@ import org.broadinstitute.sting.gatk.io.DirectOutputTracker; import org.broadinstitute.sting.gatk.io.OutputTracker; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; import org.broadinstitute.sting.gatk.traversals.TraversalEngine; -import org.broadinstitute.sting.gatk.traversals.TraverseActiveRegions; import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.threading.ThreadEfficiencyMonitor; @@ -114,12 +113,6 @@ public class LinearMicroScheduler extends MicroScheduler { done = walker.isDone(); } - // Special function call to empty out the work queue. 
Ugly for now but will be cleaned up when we eventually push this functionality more into the engine - if( traversalEngine instanceof TraverseActiveRegions) { - final Object result = ((TraverseActiveRegions) traversalEngine).endTraversal(walker, accumulator.getReduceInit()); - accumulator.accumulate(null, result); // Assumes only used with StandardAccumulator - } - Object result = accumulator.finishTraversal(); outputTracker.close(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java index cac93cb07..b85365366 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java @@ -29,14 +29,12 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; -import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.datasources.providers.*; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.ActiveRegionTraversalParameters; import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker; -import org.broadinstitute.sting.gatk.walkers.DataSource; import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.SampleUtils; @@ -92,12 +90,26 @@ public final class TraverseActiveRegions extends TraversalEngine walker; - final NanoScheduler nanoScheduler; + final NanoScheduler nanoScheduler; + + /** + * Data to use in the ActiveRegionWalker.map function produced by the NanoScheduler input iterator + */ + private static class 
MapData { + public ActiveRegion activeRegion; + public RefMetaDataTracker tracker; + + private MapData(ActiveRegion activeRegion, RefMetaDataTracker tracker) { + this.activeRegion = activeRegion; + this.tracker = tracker; + } + } /** * Create a single threaded active region traverser @@ -112,12 +124,12 @@ public final class TraverseActiveRegions extends TraversalEngine(nThreads); - nanoScheduler.setProgressFunction(new NSProgressFunction() { + nanoScheduler.setProgressFunction(new NSProgressFunction() { @Override - public void progress(ActiveRegion lastActiveRegion) { + public void progress(MapData lastActiveRegion) { if ( lastActiveRegion != null ) // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon - printProgress(lastActiveRegion.getLocation().getStopLocation()); + printProgress(lastActiveRegion.activeRegion.getLocation().getStopLocation()); } }); } @@ -149,13 +161,6 @@ public final class TraverseActiveRegions extends TraversalEngine extends TraversalEngine extends TraversalEngine walker, - final LocusShardDataProvider dataProvider, - final LocusView locusView) { - if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA ) - return new ManagingReferenceOrderedView( dataProvider ); - else - return (RodLocusView)locusView; - } - - // ------------------------------------------------------------------------------------- // // Actual traverse function @@ -254,7 +267,7 @@ public final class TraverseActiveRegions extends TraversalEngine activeRegionIterator = new ActiveRegionIterator(dataProvider); + final Iterator activeRegionIterator = new ActiveRegionIterator(dataProvider); final TraverseActiveRegionMap myMap = new TraverseActiveRegionMap(); final TraverseActiveRegionReduce myReduce = new TraverseActiveRegionReduce(); final T result = nanoScheduler.execute(activeRegionIterator, myMap, sum, myReduce); @@ -262,29 +275,53 @@ public final class TraverseActiveRegions extends TraversalEngine { + private class 
ActiveRegionIterator implements Iterator { private final LocusShardDataProvider dataProvider; - private LinkedList readyActiveRegions = new LinkedList(); + private LinkedList readyActiveRegions = new LinkedList<>(); private boolean done = false; private final LocusView locusView; private final LocusReferenceView referenceView; - private final ReferenceOrderedView referenceOrderedDataView; private final GenomeLoc locOfLastReadAtTraversalStart; + private final IntervalReferenceOrderedView referenceOrderedDataView; + private final GenomeLoc currentWindow; + private final boolean processRemainingActiveRegions; public ActiveRegionIterator( final LocusShardDataProvider dataProvider ) { this.dataProvider = dataProvider; locusView = new AllLocusView(dataProvider); referenceView = new LocusReferenceView( walker, dataProvider ); - referenceOrderedDataView = getReferenceOrderedView(walker, dataProvider, locusView); + + // The data shard may carry a number of locations to process (due to being indexed together). + // This value is just the interval we are processing within the entire provider + currentWindow = dataProvider.getLocus(); + final int currentWindowPos = dataProvider.getShard().getGenomeLocs().indexOf(currentWindow); + if ( currentWindowPos == -1 ) throw new IllegalStateException("Data provider " + dataProvider + " didn't have our current window in it " + currentWindow); + processRemainingActiveRegions = currentWindowPos == dataProvider.getShard().getGenomeLocs().size() - 1; + + // the rodSpan covers all of the bases in the activity profile, including all of the bases + // through the current window interval. This is because we may issue a query to get data for an + // active region spanning before the current interval as far back as the start of the current profile, + // if we have pending work to do that finalizes in this interval. + final GenomeLoc rodSpan = activityProfile.getSpan() == null ? 
currentWindow : activityProfile.getSpan().endpointSpan(currentWindow); + if ( ! dataProvider.getShard().getLocation().containsP(rodSpan) ) throw new IllegalStateException("Rod span " + rodSpan + " isn't contained within the data shard " + dataProvider.getShard().getLocation() + ", meaning we wouldn't get all of the data we need"); + referenceOrderedDataView = new IntervalReferenceOrderedView( dataProvider, rodSpan ); // We keep processing while the next reference location is within the interval locOfLastReadAtTraversalStart = spanOfLastSeenRead(); + + // load in the workQueue the present regions that span the current contig, if it's different from the last one + if ( walkerHasPresetRegions && ( lastRegionProcessed == null || ! currentWindow.onSameContig(lastRegionProcessed)) ) { + loadPresetRegionsForContigToWorkQueue(currentWindow.getContig()); + } + + // remember the last region we processed for sanity checking later + lastRegionProcessed = currentWindow; } @Override public void remove() { throw new UnsupportedOperationException("Cannot remove from ActiveRegionIterator"); } @Override - public ActiveRegion next() { + public MapData next() { return readyActiveRegions.pop(); } @Override @@ -326,7 +363,7 @@ public final class TraverseActiveRegions extends TraversalEngine newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false); + final List newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false, referenceOrderedDataView); dataProvider.getShard().getReadMetrics().incrementNumIterations(); @@ -335,7 +372,7 @@ public final class TraverseActiveRegions extends TraversalEngine extends TraversalEngine walker, T sum) { - for ( final ActiveRegion region : prepActiveRegionsForProcessing((ActiveRegionWalker)walker, true, true) ) { - final M x = ((ActiveRegionWalker) walker).map(region, null); - sum = walker.reduce( x, sum ); - } - return sum; - } - // 
------------------------------------------------------------------------------------- // // Functions to manage and interact with the live / dead zone @@ -594,7 +627,10 @@ public final class TraverseActiveRegions extends TraversalEngine prepActiveRegionsForProcessing(final ActiveRegionWalker walker, final boolean flushActivityProfile, final boolean forceAllRegionsToBeActive) { + private List prepActiveRegionsForProcessing(final ActiveRegionWalker walker, + final boolean flushActivityProfile, + final boolean forceAllRegionsToBeActive, + final IntervalReferenceOrderedView referenceOrderedDataView) { if ( ! walkerHasPresetRegions ) { // We don't have preset regions, so we get our regions from the activity profile final Collection activeRegions = activityProfile.popReadyActiveRegions(getActiveRegionExtension(), getMinRegionSize(), getMaxRegionSize(), flushActivityProfile); @@ -603,13 +639,13 @@ public final class TraverseActiveRegions extends TraversalEngine readyRegions = new LinkedList(); + final LinkedList readyRegions = new LinkedList<>(); while( workQueue.peek() != null ) { final ActiveRegion activeRegion = workQueue.peek(); if ( forceAllRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) { writeActivityProfile(activeRegion.getSupportingStates()); writeActiveRegion(activeRegion); - readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker)); + readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker, referenceOrderedDataView)); } else { break; } @@ -619,8 +655,10 @@ public final class TraverseActiveRegions extends TraversalEngine walker) { - final List stillLive = new LinkedList(); + private MapData prepActiveRegionForProcessing(final ActiveRegion activeRegion, + final ActiveRegionWalker walker, + final IntervalReferenceOrderedView referenceOrderedDataView) { + final List stillLive = new LinkedList<>(); for ( final GATKSAMRecord read : myReads.popCurrentReads() ) { boolean killed = false; final GenomeLoc readLoc = 
this.engine.getGenomeLocParser().createGenomeLoc( read ); @@ -653,14 +691,21 @@ public final class TraverseActiveRegions extends TraversalEngine { + private class TraverseActiveRegionMap implements NSMapFunction { @Override - public M apply(final ActiveRegion activeRegion) { - if ( DEBUG ) logger.info("Executing walker.map for " + activeRegion + " in thread " + Thread.currentThread().getName()); - return walker.map(activeRegion, null); + public M apply(final MapData mapData) { + if ( DEBUG ) logger.info("Executing walker.map for " + mapData.activeRegion + " in thread " + Thread.currentThread().getName()); + return walker.map(mapData.activeRegion, mapData.tracker); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseLociNano.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseLociNano.java index 8e67963c1..627f98d69 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseLociNano.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseLociNano.java @@ -179,7 +179,7 @@ public class TraverseLociNano extends TraversalEngine, final ReferenceContext refContext = referenceView.getReferenceContext(location); // Iterate forward to get all reference ordered data covering this location - final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location, refContext); + final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location); numIterations++; return new MapData(locus, refContext, tracker); diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java index 90050a10a..078a36dd9 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java 
@@ -204,8 +204,10 @@ public class VariantAnnotatorEngine { return annotateDBs(tracker, annotated); } - public VariantContext annotateContext(final Map perReadAlleleLikelihoodMap, VariantContext vc) { - Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); + public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker, + final Map perReadAlleleLikelihoodMap, + final VariantContext vc) { + final Map infoAnnotations = new LinkedHashMap<>(vc.getAttributes()); // go through all the requested info annotationTypes for ( InfoFieldAnnotation annotationType : requestedInfoAnnotations ) { @@ -219,10 +221,13 @@ public class VariantAnnotatorEngine { } // generate a new annotated VC - VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); + final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); // annotate genotypes, creating another new VC in the process - return builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); + final VariantContext annotated = builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); + + // annotate db occurrences + return annotateDBs(tracker, annotated); } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java index 0efabba3c..07af4bd74 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java @@ -122,7 +122,7 @@ public final class VariantOverlapAnnotator { /** * Add overlap attributes to vcToAnnotate against all overlapBindings in tracker * - * @see #annotateOverlap(java.util.List, , String, org.broadinstitute.variant.variantcontext.VariantContext) + * @see 
#annotateOverlap(java.util.List, String, org.broadinstitute.variant.variantcontext.VariantContext) * for more information * * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate @@ -130,12 +130,12 @@ public final class VariantOverlapAnnotator { * @param vcToAnnotate a variant context to annotate * @return a VariantContext (may be == to vcToAnnotate) with updated overlaps update fields value */ - public VariantContext annotateOverlaps(final RefMetaDataTracker tracker, VariantContext vcToAnnotate) { + public VariantContext annotateOverlaps(final RefMetaDataTracker tracker, final VariantContext vcToAnnotate) { if ( overlapBindings.isEmpty() ) return vcToAnnotate; VariantContext annotated = vcToAnnotate; final GenomeLoc loc = getLoc(vcToAnnotate); - for ( Map.Entry, String> overlapBinding : overlapBindings.entrySet() ) { + for ( final Map.Entry, String> overlapBinding : overlapBindings.entrySet() ) { annotated = annotateOverlap(tracker.getValues(overlapBinding.getKey(), loc), overlapBinding.getValue(), vcToAnnotate); } @@ -186,7 +186,7 @@ public final class VariantOverlapAnnotator { if ( rsIDSourceVCs == null ) throw new IllegalArgumentException("rsIDSourceVCs cannot be null"); if ( vcToAnnotate == null ) throw new IllegalArgumentException("vcToAnnotate cannot be null"); - for ( VariantContext vcComp : rsIDSourceVCs ) { + for ( final VariantContext vcComp : rsIDSourceVCs ) { if ( vcComp.isFiltered() ) continue; // don't process any failed VCs if ( ! 
vcComp.getChr().equals(vcToAnnotate.getChr()) || vcComp.getStart() != vcToAnnotate.getStart() ) diff --git a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java index 3225a128c..56725147e 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java @@ -256,7 +256,6 @@ public class ReadMetricsUnitTest extends BaseTest { } windowMaker.close(); } - traverseActiveRegions.endTraversal(walker, 0); Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java similarity index 98% rename from public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java rename to public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java index bf4d36d92..784bd727e 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java @@ -49,7 +49,7 @@ import java.util.*; /** * @author depristo */ -public class ReadBasedReferenceOrderedViewUnitTest extends BaseTest { +public class IntervalReferenceOrderedViewUnitTest extends BaseTest { private static int startingChr = 1; private static int endingChr = 2; private static int readCount = 100; @@ -285,7 +285,7 @@ public class ReadBasedReferenceOrderedViewUnitTest extends BaseTest { 
Collections.sort(intervals); final GenomeLoc span = span(intervals); - final ReadBasedReferenceOrderedView view = new ReadBasedReferenceOrderedView(genomeLocParser, span, names, iterators); + final IntervalReferenceOrderedView view = new IntervalReferenceOrderedView(genomeLocParser, span, names, iterators); if ( testStateless ) { // test each tracker is well formed, as each is created diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java index fad632cfd..1d39f43c6 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java @@ -97,7 +97,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.emptyList()); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10)); Assert.assertEquals(tracker.getValues(Feature.class).size(), 0, "The tracker should not have produced any data"); } @@ -115,7 +115,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.singletonList(dataSource)); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20), null); + RefMetaDataTracker tracker = 
view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20)); TableFeature datum = tracker.getFirstValue(new RodBinding(TableFeature.class, "tableTest")); Assert.assertEquals(datum.get("COL1"),"C","datum parameter for COL1 is incorrect"); @@ -141,7 +141,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Arrays.asList(dataSource1,dataSource2)); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20)); TableFeature datum1 = tracker.getFirstValue(new RodBinding(TableFeature.class, "tableTest1")); Assert.assertEquals(datum1.get("COL1"),"C","datum1 parameter for COL1 is incorrect"); diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java index 1f5cd6d0e..e4b6c37cc 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java @@ -405,8 +405,6 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam)) t.traverse(walker, dataProvider, 0); - t.endTraversal(walker, 0); - return walker.mappedActiveRegions; } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java index e8840c39f..5b52d4e33 100644 --- 
a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java @@ -32,6 +32,7 @@ import org.broadinstitute.sting.commandline.Tags; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; import org.broadinstitute.sting.gatk.walkers.ReadWalker; import org.broadinstitute.sting.gatk.walkers.qc.CountReads; @@ -47,6 +48,7 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static org.testng.Assert.fail; @@ -146,7 +148,7 @@ public class TraverseReadsUnitTest extends BaseTest { fail("Shard == null"); } - ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null,null); + ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null, Collections.emptyList()); accumulator = traversalEngine.traverse(countReadWalker, dataProvider, accumulator); dataProvider.close(); } From 58e354176e14e4175940f6783962661d33f18774 Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Tue, 11 Jun 2013 10:33:22 -0400 Subject: [PATCH 43/99] Minor changes to docs in the graph pruning. 
--- .../haplotypecaller/graphs/LowWeightChainPruner.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java index 7327b5736..27b6bd902 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java @@ -80,7 +80,7 @@ public class LowWeightChainPruner { final Set edgesToKeep = new LinkedHashSet<>(); for ( final Path linearChain : getLinearChains(graph) ) { - if( mustBeKeep(linearChain, pruneFactor) ) { + if( mustBeKept(linearChain, pruneFactor) ) { // we must keep edges in any path that contains a reference edge or an edge with weight > pruneFactor edgesToKeep.addAll(linearChain.getEdges()); } @@ -96,10 +96,14 @@ public class LowWeightChainPruner { } /** - * Get the maximum pruning multiplicity seen on any edge in this graph - * @return an integer > 0 + * Traverse the edges in the path and determine if any are either ref edges or have weight above + * the pruning factor and should therefore not be pruned away. + * + * @param path the path in question + * @param pruneFactor the integer pruning factor + * @return true if any edge in the path must be kept */ - private boolean mustBeKeep(final Path path, final int pruneFactor) { + private boolean mustBeKept(final Path path, final int pruneFactor) { for ( final E edge : path.getEdges() ) { if ( edge.getPruningMultiplicity() >= pruneFactor || edge.isRef() ) return true; From e4e7d39e2c8e9cb6a21f5152f46e20d334f81df0 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Thu, 23 May 2013 12:02:19 -0400 Subject: [PATCH 44/99] Fix FN problem stemming from sequence graphs that contain cycles. 
Problem: The sequence graphs can get very complex and it's not enough just to test that any given read has non-unique kmers. Reads with variants can have kmers that match unique regions of the reference, and this causes cycles in the final sequence graph. Ultimately the problem is that kmers of 10/25 may not be large enough for these complex regions. Solution: We continue to try kmers of 10/25 but detect whether cycles exist; if so, we do not use them. If (and only if) we can't get usable graphs from the 10/25 kmers, then we start iterating over larger kmers until we either can generate a graph without cycles or attempt too many iterations. --- .../haplotypecaller/HaplotypeCaller.java | 6 +- .../readthreading/ReadThreadingAssembler.java | 145 +++++++++++------- .../readthreading/ReadThreadingGraph.java | 18 ++- .../ReadThreadingGraphUnitTest.java | 35 +++++ 4 files changed, 147 insertions(+), 57 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index e55413649..a41b68e2c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -266,6 +266,10 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="kmerSize", shortName="kmerSize", doc="Kmer size to use in the read threading assembler", required = false) protected List kmerSizes = Arrays.asList(10, 25); + @Advanced + @Argument(fullName="dontIncreaseKmerSizesForCycles", shortName="dontIncreaseKmerSizesForCycles", doc="Should we disable the iterating over kmer sizes when graph cycles are detected?", required = false) + protected boolean dontIncreaseKmerSizesForCycles = false; + /** * Assembly graph can be quite complex, and could imply a very large number of possible haplotypes. 
Each haplotype * considered requires N PairHMM evaluations if there are N reads across all samples. In order to control the @@ -520,7 +524,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final int maxAllowedPathsForReadThreadingAssembler = Math.max(maxPathsPerSample * nSamples, MIN_PATHS_PER_GRAPH); assemblyEngine = useDebruijnAssembler ? new DeBruijnAssembler(minKmerForDebruijnAssembler, onlyUseKmerSizeForDebruijnAssembler) - : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes); + : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes, dontIncreaseKmerSizesForCycles); assemblyEngine.setErrorCorrectKmers(errorCorrectKmers); assemblyEngine.setPruneFactor(MIN_PRUNE_FACTOR); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index 123b36640..0887929ab 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -49,6 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.LocalAssemblyEngine; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; @@ -63,11 +64,14 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { private final static int DEFAULT_NUM_PATHS_PER_GRAPH = 128; private final static int GGA_MODE_ARTIFICIAL_COUNTS = 1000; + private final static int KMER_SIZE_ITERATION_INCREASE = 10; + private 
final static int MAX_KMER_ITERATIONS_TO_ATTEMPT = 6; /** The min and max kmer sizes to try when building the graph. */ private final List kmerSizes; private final int maxAllowedPathsForReadThreadingAssembler; + private final boolean dontIncreaseKmerSizesForCycles; private boolean requireReasonableNumberOfPaths = false; protected boolean removePathsNotConnectedToRef = true; private boolean justReturnRawGraph = false; @@ -77,10 +81,15 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { this(DEFAULT_NUM_PATHS_PER_GRAPH, Arrays.asList(25)); } - public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes, final boolean dontIncreaseKmerSizesForCycles) { super(maxAllowedPathsForReadThreadingAssembler); this.kmerSizes = kmerSizes; this.maxAllowedPathsForReadThreadingAssembler = maxAllowedPathsForReadThreadingAssembler; + this.dontIncreaseKmerSizesForCycles = dontIncreaseKmerSizesForCycles; + } + + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { + this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, false); } /** for testing purposes */ @@ -89,67 +98,99 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { } @Override - public List assemble( final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { + public List assemble(final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes) { final List graphs = new LinkedList<>(); + // first, try using the requested kmer sizes for ( final int kmerSize : kmerSizes ) { - final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly); + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes); + if ( graph != null ) + graphs.add(graph); + } - // add 
the reference sequence to the graph - rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); - - // add the artificial GGA haplotypes to the graph - int hapCount = 0; - for( final Haplotype h : activeAlleleHaplotypes ) { - final int[] counts = new int[h.length()]; - Arrays.fill(counts, GGA_MODE_ARTIFICIAL_COUNTS); - rtgraph.addSequence("activeAllele" + hapCount++, h.getBases(), counts, false); - } - - // Next pull kmers out of every read and throw them on the graph - for( final GATKSAMRecord read : reads ) { - rtgraph.addRead(read); - } - - // actually build the read threading graph - rtgraph.buildGraphIfNecessary(); - printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.0.raw_readthreading_graph.dot")); - - // go through and prune all of the chains where all edges have <= pruneFactor. This must occur - // before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering - // tails that we'll ultimately just trim away anyway, as the dangling tail edges have weight of 1 - rtgraph.pruneLowWeightChains(pruneFactor); - - // look at all chains in the graph that terminate in a non-ref node (dangling sinks) and see if - // we can recover them by merging some N bases from the chain back into the reference uniquely, for - // N < kmerSize - if ( recoverDanglingTails ) rtgraph.recoverDanglingTails(); - - // remove all heading and trailing paths - if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef(); - - printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot")); - - final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph(); - - // if the unit tests don't want us to cleanup the graph, just return the raw sequence graph - if ( justReturnRawGraph ) return Collections.singletonList(initialSeqGraph); - - if ( debug ) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler"); - printDebugGraphTransform(initialSeqGraph, new 
File("sequenceGraph.0.2.initial_seqgraph.dot")); - initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't this is possible by construction - - final SeqGraph seqGraph = cleanupSeqGraph(initialSeqGraph); - if ( seqGraph != null ) { - if ( ! requireReasonableNumberOfPaths || reasonableNumberOfPaths(seqGraph) ) { - graphs.add(seqGraph); - } + // if none of those worked, iterate over larger sizes if allowed to do so + if ( graphs.isEmpty() && !dontIncreaseKmerSizesForCycles ) { + int kmerSize = MathUtils.arrayMaxInt(kmerSizes) + KMER_SIZE_ITERATION_INCREASE; + int numIterations = 1; + while ( graphs.isEmpty() && numIterations <= MAX_KMER_ITERATIONS_TO_ATTEMPT ) { + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes); + if ( graph != null ) + graphs.add(graph); + kmerSize += KMER_SIZE_ITERATION_INCREASE; + numIterations++; } } return graphs; } + /** + * Creates the sequence graph for the given kmerSize + * + * @param reads reads to use + * @param refHaplotype reference haplotype + * @param kmerSize kmer size + * @param activeAlleleHaplotypes the GGA haplotypes to inject into the graph + * @return sequence graph or null if one could not be created (e.g. 
because it contains cycles or too many paths) + */ + protected SeqGraph createGraph(final List reads, final Haplotype refHaplotype, final int kmerSize, final List activeAlleleHaplotypes) { + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly); + + // add the reference sequence to the graph + rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); + + // add the artificial GGA haplotypes to the graph + int hapCount = 0; + for ( final Haplotype h : activeAlleleHaplotypes ) { + final int[] counts = new int[h.length()]; + Arrays.fill(counts, GGA_MODE_ARTIFICIAL_COUNTS); + rtgraph.addSequence("activeAllele" + hapCount++, h.getBases(), counts, false); + } + + // Next pull kmers out of every read and throw them on the graph + for( final GATKSAMRecord read : reads ) { + rtgraph.addRead(read); + } + + // actually build the read threading graph + rtgraph.buildGraphIfNecessary(); + + // sanity check: make sure there are no cycles in the graph + if ( rtgraph.hasCycles() ) { + if ( debug ) logger.info("Not using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler because it contains a cycle"); + return null; + } + + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.0.raw_readthreading_graph.dot")); + + // go through and prune all of the chains where all edges have <= pruneFactor. 
This must occur + // before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering + // tails that we'll ultimately just trim away anyway, as the dangling tail edges have weight of 1 + rtgraph.pruneLowWeightChains(pruneFactor); + + // look at all chains in the graph that terminate in a non-ref node (dangling sinks) and see if + // we can recover them by merging some N bases from the chain back into the reference uniquely, for + // N < kmerSize + if ( recoverDanglingTails ) rtgraph.recoverDanglingTails(); + + // remove all heading and trailing paths + if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef(); + + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot")); + + final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph(); + + // if the unit tests don't want us to cleanup the graph, just return the raw sequence graph + if ( justReturnRawGraph ) return initialSeqGraph; + + if ( debug ) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler"); + printDebugGraphTransform(initialSeqGraph, new File("sequenceGraph.0.2.initial_seqgraph.dot")); + initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't this is possible by construction + + final SeqGraph seqGraph = cleanupSeqGraph(initialSeqGraph); + return ( seqGraph != null && requireReasonableNumberOfPaths && !reasonableNumberOfPaths(seqGraph) ) ? null : seqGraph; + } + /** * Did we find a reasonable number of paths in this graph? 
* @param graph diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index 8e879377f..bbc1618ac 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -54,6 +54,7 @@ import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.collections.PrimitivePair; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.jgrapht.EdgeFactory; +import org.jgrapht.alg.CycleDetector; import java.io.File; import java.util.*; @@ -297,7 +298,7 @@ public class ReadThreadingGraph extends BaseGraph(this).detectCycles(); + } + public void recoverDanglingTails() { if ( ! alreadyBuilt ) throw new IllegalStateException("recoverDanglingTails requires the graph be already built"); @@ -409,7 +417,8 @@ public class ReadThreadingGraph extends BaseGraph determineNonUniqueKmers(final SequenceForKmers seqForKmers, final int kmerSize) { // count up occurrences of kmers within each read final KMerCounter counter = new KMerCounter(kmerSize); - for ( int i = 0; i <= seqForKmers.stop - kmerSize; i++ ) { + final int stopPosition = seqForKmers.stop - kmerSize; + for ( int i = 0; i <= stopPosition; i++ ) { final Kmer kmer = new Kmer(seqForKmers.sequence, i, kmerSize); counter.addKmer(kmer, 1); } @@ -578,7 +587,7 @@ public class ReadThreadingGraph extends BaseGraph " + uniqueMergeVertex); @@ -590,7 +599,8 @@ public class ReadThreadingGraph extends BaseGraph reads = new ArrayList<>(); + for ( int index = 0; index < alt.length() - 100; index += 20 ) + reads.add(ArtificialSAMUtils.createArtificialRead(Arrays.copyOfRange(alt.getBytes(), index, index + 100), Utils.dupBytes((byte) 30, 100), 100 + "M")); + 
+ // test that there are cycles detected for small kmer + final ReadThreadingGraph rtgraph25 = new ReadThreadingGraph(25); + rtgraph25.addSequence("ref", ref.getBytes(), null, true); + for ( final GATKSAMRecord read : reads ) + rtgraph25.addRead(read); + rtgraph25.buildGraphIfNecessary(); + Assert.assertTrue(rtgraph25.hasCycles()); + + // test that there are no cycles detected for large kmer + final ReadThreadingGraph rtgraph75 = new ReadThreadingGraph(75); + rtgraph75.addSequence("ref", ref.getBytes(), null, true); + for ( final GATKSAMRecord read : reads ) + rtgraph75.addRead(read); + rtgraph75.buildGraphIfNecessary(); + Assert.assertFalse(rtgraph75.hasCycles()); + } + // TODO -- update to use determineKmerSizeAndNonUniques directly // @DataProvider(name = "KmerSizeData") // public Object[][] makeKmerSizeDataProvider() { From 77868d034f4e006b8e33d2e9bf39447b88790ba7 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Thu, 30 May 2013 14:00:43 -0400 Subject: [PATCH 45/99] Do not allow the use of Ns in reads for graph construction. Ns are treated as wildcards in the PairHMM so creating haplotypes with Ns gives them artificial advantages over other ones. This was the cause of at least one FN where there were Ns at a SNP position. 
--- .../readthreading/ReadThreadingGraph.java | 15 +++++++++- .../LocalAssemblyEngineUnitTest.java | 2 +- .../ReadThreadingGraphUnitTest.java | 30 ++++++++++++++++--- 3 files changed, 41 insertions(+), 6 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index bbc1618ac..ab6b17c35 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -50,6 +50,7 @@ import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.KMerCounter; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.collections.PrimitivePair; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; @@ -611,7 +612,7 @@ public class ReadThreadingGraph extends BaseGraph= minBaseQualityToUseInAssembly; + } + /** * Get the set of non-unique kmers in this graph. 
For debugging purposes * @return a non-null set of kmers diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java index 74361de1b..a74ce1c75 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java @@ -251,7 +251,7 @@ public class LocalAssemblyEngineUnitTest extends BaseTest { for ( int snpPos = 0; snpPos < windowSize; snpPos++) { if ( snpPos > excludeVariantsWithXbp && (windowSize - snpPos) >= excludeVariantsWithXbp ) { final byte[] altBases = ref.getBytes(); - altBases[snpPos] = 'N'; + altBases[snpPos] = altBases[snpPos] == 'A' ? (byte)'C' : (byte)'A'; final String alt = new String(altBases); tests.add(new Object[]{"SNP at " + snpPos, assembler, refLoc, ref, alt}); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java index 340777513..67ee52734 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java @@ -48,10 +48,8 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; -import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.MultiSampleEdge; -import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.SeqGraph; +import 
org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; @@ -180,7 +178,31 @@ public class ReadThreadingGraphUnitTest extends BaseTest { Assert.assertFalse(rtgraph75.hasCycles()); } - // TODO -- update to use determineKmerSizeAndNonUniques directly + @Test(enabled = !DEBUG) + public void testNsInReadsAreNotUsedForGraph() { + + final int length = 100; + final byte[] ref = Utils.dupBytes((byte)'A', length); + + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(25); + rtgraph.addSequence("ref", ref, null, true); + + // add reads with Ns at any position + for ( int i = 0; i < length; i++ ) { + final byte[] bases = ref.clone(); + bases[i] = 'N'; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, Utils.dupBytes((byte) 30, length), length + "M"); + rtgraph.addRead(read); + } + rtgraph.buildGraphIfNecessary(); + + final SeqGraph graph = rtgraph.convertToSequenceGraph(); + final KBestPaths pathFinder = new KBestPaths<>(false); + Assert.assertEquals(pathFinder.getKBestPaths(graph, length, graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex()).size(), 1); + } + + +// TODO -- update to use determineKmerSizeAndNonUniques directly // @DataProvider(name = "KmerSizeData") // public Object[][] makeKmerSizeDataProvider() { // List tests = new ArrayList(); From c0e3874db095836e389d4ff7c277fb778a54c7b8 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Mon, 3 Jun 2013 14:34:29 -0400 Subject: [PATCH 46/99] Change the HC's phredScaledGlobalReadMismappingRate from 60 to 45, because Ryan and Mark told me to. 
--- .../sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index a41b68e2c..24fd5901f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -336,7 +336,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In */ @Advanced @Argument(fullName="phredScaledGlobalReadMismappingRate", shortName="globalMAPQ", doc="The global assumed mismapping rate for reads", required = false) - protected int phredScaledGlobalReadMismappingRate = 60; + protected int phredScaledGlobalReadMismappingRate = 45; @Advanced @Argument(fullName="maxNumHaplotypesInPopulation", shortName="maxNumHaplotypesInPopulation", doc="Maximum number of haplotypes to consider for your population. This number will probably need to be increased when calling organisms with high heterozygosity.", required = false) From c0030f3f2dd36d29d70b1fa06daf2e399e99169a Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Tue, 4 Jun 2013 09:26:50 -0400 Subject: [PATCH 47/99] We no longer subset down to the best N haplotypes for the GL calculation. I explain in comments within the code that this was causing problems with the marginalization over events. 
--- .../haplotypecaller/HaplotypeCaller.java | 26 +++++-------------- .../haplotypecaller/LocalAssemblyEngine.java | 2 +- 2 files changed, 8 insertions(+), 20 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 24fd5901f..3e411ae33 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -694,11 +694,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In //logger.info("Computing read likelihoods with " + assemblyResult.regionForGenotyping.size() + " reads"); final Map stratifiedReadMap = likelihoodCalculationEngine.computeReadLikelihoods( assemblyResult.haplotypes, splitReadsBySample( assemblyResult.regionForGenotyping.getReads() ) ); - // subset down to only the best haplotypes to be genotyped in all samples ( in GGA mode use all discovered haplotypes ) - final List bestHaplotypes = selectBestHaplotypesForGenotyping(assemblyResult.haplotypes, stratifiedReadMap); + // Note: we used to subset down at this point to only the "best" haplotypes in all samples for genotyping, but there + // was a bad interaction between that selection and the marginalization that happens over each event when computing + // GLs. In particular, for samples that are heterozygous non-reference (B/C) the marginalization for B treats the + // haplotype containing C as reference (and vice versa). Now this is fine if all possible haplotypes are included + // in the genotyping, but we lose information if we select down to a few haplotypes. 
[EB] final GenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine.assignGenotypeLikelihoods( UG_engine, - bestHaplotypes, + assemblyResult.haplotypes, stratifiedReadMap, perSampleFilteredReadList, assemblyResult.fullReferenceWithPadding, @@ -711,7 +714,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // TODO -- must disable if we are doing NCT, or set the output type of ! presorted if ( bamWriter != null ) { haplotypeBAMWriter.writeReadsAlignedToHaplotypes(assemblyResult.haplotypes, assemblyResult.paddedReferenceLoc, - bestHaplotypes, + assemblyResult.haplotypes, calledHaplotypes.getCalledHaplotypes(), stratifiedReadMap); } @@ -863,21 +866,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In return new AssemblyResult(trimmedHaplotypes, trimmedActiveRegion, fullReferenceWithPadding, paddedReferenceLoc, true); } - /** - * Select the best N haplotypes according to their likelihoods, if appropriate - * - * @param haplotypes a list of haplotypes to consider - * @param stratifiedReadMap a map from samples -> read likelihoods - * @return the list of haplotypes to genotype - */ - protected List selectBestHaplotypesForGenotyping(final List haplotypes, final Map stratifiedReadMap) { - if ( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { - return haplotypes; - } else { - return likelihoodCalculationEngine.selectBestHaplotypesFromEachSample(haplotypes, stratifiedReadMap, maxNumHaplotypesInPopulation); - } - } - //--------------------------------------------------------------------------------------------------------------- // // reduce diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index 1a5f34bc3..2a74e9dd0 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -215,7 +215,7 @@ public abstract class LocalAssemblyEngine { returnHaplotypes.add(h); if ( debug ) - logger.info("Adding haplotype " + h.getCigar() + " from debruijn graph with kmer " + graph.getKmerSize()); + logger.info("Adding haplotype " + h.getCigar() + " from graph with kmer " + graph.getKmerSize()); } } } From 55d5f2194cd4b75f2a2f2e03c1057daa67fe6ade Mon Sep 17 00:00:00 2001 From: Guillermo del Angel Date: Tue, 4 Jun 2013 14:25:26 -0400 Subject: [PATCH 48/99] Read Error Corrector for haplotype assembly Principle is simple: when coverage is deep enough, any single-base read error will look like a rare k-mer but correct sequence will be supported by many reads to correct sequences will look like common k-mers. So, algorithm has 3 main steps: 1. K-mer graph buildup. For each read in an active region, a map from k-mers to the number of times they have been seen is built. 2. Building correction map. All "rare" k-mers that are sparse (by default, seen only once), get mapped to k-mers that are good (by default, seen at least 20 times but this is a CL argument), and that lie within a given Hamming distance (by default, =1). This map can be empty (i.e. k-mers can be uncorrectable). 3. Correction proposal For each constituent k-mer of each read, if this k-mer is rare and maps to a good k-mer, get differing base positions in k-mer and add these to a list of corrections for each base in each read. Then, correct read at positions where correction proposal is unanimous and non-empty. The algorithm defaults are chosen to be very stringent and conservative in the correction: we only try to correct singleton k-mers, we only look for good k-mers lying at Hamming distance = 1 from them, and we only correct a base in read if all correction proposals are congruent. 
By default, algorithm is disabled but can be enabled in HaplotypeCaller via the -readErrorCorrect CL option. However, at this point it's about 3x-10x more expensive so it needs to be optimized if it's to be used. --- .../haplotypecaller/HaplotypeCaller.java | 30 +- .../gatk/walkers/haplotypecaller/Kmer.java | 52 ++ .../haplotypecaller/LocalAssemblyEngine.java | 22 +- .../haplotypecaller/ReadErrorCorrector.java | 526 ++++++++++++++++++ .../walkers/haplotypecaller/KmerUnitTest.java | 41 +- .../LocalAssemblyEngineUnitTest.java | 2 +- .../ReadErrorCorrectorUnitTest.java | 190 +++++++ 7 files changed, 855 insertions(+), 8 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java create mode 100644 protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index e55413649..680ae06e1 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -396,6 +396,20 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequenece rather than just the shortest paths", required = false) protected boolean allowCyclesInKmerGraphToGeneratePaths = false; + // Parameters to control read error correction + @Hidden + @Argument(fullName="errorCorrectReads", shortName="errorCorrectReads", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. 
May cause fundamental problems with the assembly graph itself", required=false) + protected boolean errorCorrectReads = false; + + @Hidden + @Argument(fullName="kmerLengthForReadErrorCorrection", shortName="kmerLengthForReadErrorCorrection", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. May cause fundamental problems with the assembly graph itself", required=false) + protected int kmerLengthForReadErrorCorrection = 25; + + @Hidden + @Argument(fullName="minObservationsForKmerToBeSolid", shortName="minObservationsForKmerToBeSolid", doc = "A k-mer must be seen at least these times for it considered to be solid", required=false) + protected int minObservationsForKmerToBeSolid = 20; + + // ----------------------------------------------------------------------------------------------- // done with Haplotype caller parameters // ----------------------------------------------------------------------------------------------- @@ -437,6 +451,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // bases with quality less than or equal to this value are trimmed off the tails of the reads private static final byte MIN_TAIL_QUALITY = 20; + private static final byte MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION = 6; // the minimum length of a read we'd consider using for genotyping private final static int MIN_READ_LENGTH = 10; @@ -754,8 +769,13 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); final Haplotype referenceHaplotype = createReferenceHaplotype(activeRegion, paddedReferenceLoc); + // Create ReadErrorCorrector object if requested - will be used within assembly engine. 
+ ReadErrorCorrector readErrorCorrector = null; + if (errorCorrectReads) + readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection, MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION, minObservationsForKmerToBeSolid, DEBUG,fullReferenceWithPadding); + try { - final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); + final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype,readErrorCorrector ); if ( ! dontTrimActiveRegions ) { return trimActiveRegion(activeRegion, haplotypes, activeAllelesToGenotype, fullReferenceWithPadding, paddedReferenceLoc); } else { @@ -922,7 +942,13 @@ public class HaplotypeCaller extends ActiveRegionWalker, In for( final GATKSAMRecord myRead : finalizedReadList ) { final GATKSAMRecord postAdapterRead = ( myRead.getReadUnmappedFlag() ? myRead : ReadClipper.hardClipAdaptorSequence( myRead ) ); if( postAdapterRead != null && !postAdapterRead.isEmpty() && postAdapterRead.getCigar().getReadLength() > 0 ) { - GATKSAMRecord clippedRead = useLowQualityBasesForAssembly ? 
postAdapterRead : ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY ); + GATKSAMRecord clippedRead; + if (errorCorrectReads) + clippedRead = ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION ); + else if (useLowQualityBasesForAssembly) + clippedRead = postAdapterRead; + else // default case: clip low qual ends of reads + clippedRead= ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY ); if ( dontUseSoftClippedBases ) { // uncomment to remove hard clips from consideration at all diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java index 745d4de06..2e757722b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java @@ -46,7 +46,11 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; +import com.google.java.contract.Requires; + import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; /** * Fast wrapper for byte[] kmers @@ -149,6 +153,15 @@ public class Kmer { return bases; } + /** + * Backdoor method for fast base peeking: avoids copying like bases() and doesn't modify internal state. + * Intended to be used for fast computation of neighboring kmers + * @return Reference to complete bases stores in this kmer + * WARNING: UNSAFE, caller should NEVER modify bases. Speed/safety tradeoff!! + */ + private byte[] unsafePeekAtBases() { + return bases; + } /** * Get a string representation of the bases of this kmer * @return a non-null string @@ -165,6 +178,45 @@ public class Kmer { return length; } + /** + * Gets a set of differing positions and bases from another k-mer, limiting up to a max distance. 
+ * For example, if this = "ACATT" and other = "ACGGT": + * - if maxDistance < 2 then -1 will be returned, since distance between kmers is 2. + * - If maxDistance >=2, then 2 will be returned, and arrays will be filled as follows: + * differingIndeces = {2,3} + * differingBases = {'G','G'} + * @param other Other k-mer to test + * @param maxDistance Maximum distance to search. If this and other k-mers are beyond this Hamming distance, + * search is aborted and a null is returned + * @param differingIndeces Array with indices of differing bytes in array + * @param differingBases Actual differing bases + * @return Set of mappings of form (int->byte), where each elements represents index + * of k-mer array where bases mismatch, and the byte is the base from other kmer. + * If both k-mers differ by more than maxDistance, returns null + */ + @Requires({"other != null","differingIndeces != null","differingBases != null", + "differingIndeces.size>=maxDistance","differingBases.size>=maxDistance"}) + public int getDifferingPositions(final Kmer other, + final int maxDistance, + final int[] differingIndeces, + final byte[] differingBases) { + + + int dist = 0; + if (length == other.length()) { + final byte[] f2 = other.unsafePeekAtBases(); + for (int i=0; i < length; i++) + if(bases[start+i] != f2[i]) { + differingIndeces[dist] = i; + differingBases[dist++] = f2[i]; + if (dist > maxDistance) + return -1; + } + + } + return dist; + } + @Override public String toString() { return "Kmer{" + new String(bases()) + "}"; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index 1a5f34bc3..9f2197a84 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -128,9 +128,15 
@@ public abstract class LocalAssemblyEngine { * @param fullReferenceWithPadding byte array holding the reference sequence with padding * @param refLoc GenomeLoc object corresponding to the reference sequence with padding * @param activeAllelesToGenotype the alleles to inject into the haplotypes during GGA mode + * @param readErrorCorrector a ReadErrorCorrector object, if read are to be corrected before assembly. Can be null if no error corrector is to be used. * @return a non-empty list of all the haplotypes that are produced during assembly */ - public List runLocalAssembly(ActiveRegion activeRegion, Haplotype refHaplotype, byte[] fullReferenceWithPadding, GenomeLoc refLoc, List activeAllelesToGenotype) { + public List runLocalAssembly(final ActiveRegion activeRegion, + final Haplotype refHaplotype, + final byte[] fullReferenceWithPadding, + final GenomeLoc refLoc, + final List activeAllelesToGenotype, + final ReadErrorCorrector readErrorCorrector) { if( activeRegion == null ) { throw new IllegalArgumentException("Assembly engine cannot be used with a null ActiveRegion."); } if( refHaplotype == null ) { throw new IllegalArgumentException("Reference haplotype cannot be null."); } if( fullReferenceWithPadding.length != refLoc.size() ) { throw new IllegalArgumentException("Reference bases and reference loc must be the same size."); } @@ -139,8 +145,20 @@ public abstract class LocalAssemblyEngine { // create the list of artificial haplotypes that should be added to the graph for GGA mode final List activeAlleleHaplotypes = createActiveAlleleHaplotypes(refHaplotype, activeAllelesToGenotype, activeRegion.getExtendedLoc()); + + // error-correct reads before clipping low-quality tails: some low quality bases might be good and we want to recover them + final List correctedReads; + if (readErrorCorrector != null) { + // now correct all reads in active region after filtering/downsampling + // Note that original reads in active region are NOT modified by default, since they 
will be used later for GL computation, + // and we only want the read-error corrected reads for graph building. + readErrorCorrector.addReadsToKmers(activeRegion.getReads()); + correctedReads = new ArrayList<>(readErrorCorrector.correctReads(activeRegion.getReads())); + } + else correctedReads = activeRegion.getReads(); + // create the graphs by calling our subclass assemble method - final List graphs = assemble(activeRegion.getReads(), refHaplotype, activeAlleleHaplotypes); + final List graphs = assemble(correctedReads, refHaplotype, activeAlleleHaplotypes); // do some QC on the graphs for ( final SeqGraph graph : graphs ) { sanityCheckGraph(graph, refHaplotype); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java new file mode 100644 index 000000000..e1471ab33 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java @@ -0,0 +1,526 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. 
+* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. 
NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import com.google.java.contract.Requires; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.clipping.ReadClipper; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.util.*; + +/** + * Utility class that error-corrects reads. + * Main idea: An error in a read will appear as a bubble in a k-mer (de Bruijn) graph and such bubble will have very low multiplicity. + * Hence, read errors will appear as "sparse" kmers with very little support. + * Historically, the most common approach to error-correct reads before assembly has been to first compute the kmer spectrum of the reads, + * defined as the kmer composition of a set of reads along with the multiplicity of each kmer. + * First-generation correctors like the Euler corrector (Pevzner 2001) mapped low frequency kmers (kmers appearing say below N times) + * into high frequency ones that lied within a certain Hamming or edit distance. + * This is doable, but has some drawbacks: + * - Kmers used for error correction become tied to kmers used for graph building. + * - Hence, large kmers (desireable for graph building because they can resolve repeats better) are a hindrance for error correction, + * because they are seen less often. + * - After error correction, there is no guarantee that a sequence of kmers corresponds to an "actual" read. + * + * An error-corrected set of reads also makes a much smoother graph without the need to resolving so many bubbles. + * + * Idea hence is to correct reads based on their kmer content, but in a context independent from graph building. + * In order to do this, the following steps are taken: + * - The k-mer spectrum of a set of reads in computed. 
However, we are at freedom to choose the most convenient k-mer size (typicially around + * read length /2). + * - We partition the set of observed k-mers into "solid" kmers which have multiplicity > M, and "insolid" ones otherwise (Pevzner 2001). + * + * - Main idea of the algorithm is to try to substitute a sequence of bases in a read by a sequence better supported by kmers. + * - For each "unsolid" kmer observed in reads, we try to find a "solid" kmer within a maximum Hamming distance. + * - If such solid kmer exists, then this unsolid kmer is "correctable", otherwise, uncorrectable. + * - For each read, then: + * -- Walk through read and visit all kmers. + * -- If kmer is solid, continue to next kmer. + * -- If not, and if it's correctable (i.e. there exists a mapping from an unsolid kmer to a solid kmer within a given Hamming distance), + * add the bases and offsets corresponding to differing positions between unsolid and solid kmer to correction list. + * -- At the end, each base in read will have a list of corrections associated with it. We can then choose to correct or not. + * If read has only consistent corrections, then we can correct base to common base in corrections. + * + * TODO: + * todo Q: WHAT QUALITY TO USE?? + * todo how do we deal with mate pairs? 
+ * + * + + + */ +public class ReadErrorCorrector { + private final static Logger logger = Logger.getLogger(ReadErrorCorrector.class); + /** + * A map of for each kmer to its num occurrences in addKmers + */ + KMerCounter countsByKMer; + + Map kmerCorrectionMap = new HashMap<>(); + Map> kmerDifferingBases = new HashMap<>(); + private final int kmerLength; + private final boolean debug; + private final boolean trimLowQualityBases; + private final byte minTailQuality; + private final int maxMismatchesToCorrect; + private final byte qualityOfCorrectedBases; + private final int maxObservationsForKmerToBeCorrectable; + private final int maxHomopolymerLengthInRegion; + private final int minObservationsForKmerToBeSolid; + + // default values, for debugging + private final static boolean doInplaceErrorCorrection = false; // currently not used, since we want corrected reads to be used only for assembly + private final static int MAX_MISMATCHES_TO_CORRECT = 2; + private final static byte QUALITY_OF_CORRECTED_BASES = 30; // what's a reasonable value here? 
+ private final static int MAX_OBSERVATIONS_FOR_KMER_TO_BE_CORRECTABLE = 1; + private final static boolean TRIM_LOW_QUAL_TAILS = false; + private final static boolean DONT_CORRECT_IN_LONG_HOMOPOLYMERS = false; + private final static int MAX_HOMOPOLYMER_THRESHOLD = 12; + + // debug counter structure + private final ReadErrorCorrectionStats readErrorCorrectionStats = new ReadErrorCorrectionStats(); + + /** + * Create a new kmer corrector + * + * @param kmerLength the length of kmers we'll be counting to error correct, must be >= 1 + * @param maxMismatchesToCorrect e >= 0 + * @param qualityOfCorrectedBases Bases to be corrected will be assigned this quality + */ + public ReadErrorCorrector(final int kmerLength, + final int maxMismatchesToCorrect, + final int maxObservationsForKmerToBeCorrectable, + final byte qualityOfCorrectedBases, + final int minObservationsForKmerToBeSolid, + final boolean trimLowQualityBases, + final byte minTailQuality, + final boolean debug, + final byte[] fullReferenceWithPadding) { + if ( kmerLength < 1 ) throw new IllegalArgumentException("kmerLength must be > 0 but got " + kmerLength); + if ( maxMismatchesToCorrect < 1 ) + throw new IllegalArgumentException("maxMismatchesToCorrect must be >= 1 but got " + maxMismatchesToCorrect); + if ( qualityOfCorrectedBases < 2 || qualityOfCorrectedBases > QualityUtils.MAX_REASONABLE_Q_SCORE) + throw new IllegalArgumentException("qualityOfCorrectedBases must be >= 2 and <= MAX_REASONABLE_Q_SCORE but got " + qualityOfCorrectedBases); + + countsByKMer = new KMerCounter(kmerLength); + this.kmerLength = kmerLength; + this.maxMismatchesToCorrect = maxMismatchesToCorrect; + this.qualityOfCorrectedBases = qualityOfCorrectedBases; + this.minObservationsForKmerToBeSolid = minObservationsForKmerToBeSolid; + this.trimLowQualityBases = trimLowQualityBases; + this.minTailQuality = minTailQuality; + this.debug = debug; + this.maxObservationsForKmerToBeCorrectable = maxObservationsForKmerToBeCorrectable; + + // when 
region has long homopolymers, we may want not to correct reads, since assessment is complicated, + // so we may decide to skip error correction in these regions + maxHomopolymerLengthInRegion = computeMaxHLen(fullReferenceWithPadding); + } + + /** + * Simple constructor with sensible defaults + * @param kmerLength K-mer length for error correction (not necessarily the same as for assembly graph) + * @param minTailQuality Minimum tail quality: remaining bases with Q's below this value are hard-clipped after correction + * @param debug Output debug information + */ + public ReadErrorCorrector(final int kmerLength, final byte minTailQuality, final int minObservationsForKmerToBeSolid, final boolean debug,final byte[] fullReferenceWithPadding) { + this(kmerLength, MAX_MISMATCHES_TO_CORRECT, MAX_OBSERVATIONS_FOR_KMER_TO_BE_CORRECTABLE, QUALITY_OF_CORRECTED_BASES, minObservationsForKmerToBeSolid, TRIM_LOW_QUAL_TAILS, minTailQuality, debug,fullReferenceWithPadding); + } + + /** + * Main entry routine to add all kmers in a read to the read map counter + * @param read Read to add bases + */ + @Requires("read != null") + protected void addReadKmers(final GATKSAMRecord read) { + if (DONT_CORRECT_IN_LONG_HOMOPOLYMERS && maxHomopolymerLengthInRegion > MAX_HOMOPOLYMER_THRESHOLD) + return; + + final byte[] readBases = read.getReadBases(); + for (int offset = 0; offset <= readBases.length-kmerLength; offset++ ) { + countsByKMer.addKmer(new Kmer(readBases,offset,kmerLength),1); + + } + } + + /** + * Correct a collection of reads based on stored k-mer counts + * @param reads + */ + public final List correctReads(final Collection reads) { + + final List correctedReads = new ArrayList<>(reads.size()); + if (DONT_CORRECT_IN_LONG_HOMOPOLYMERS && maxHomopolymerLengthInRegion > MAX_HOMOPOLYMER_THRESHOLD) { + // just copy reads into output and exit + correctedReads.addAll(reads); + } + else { + computeKmerCorrectionMap(); + for (final GATKSAMRecord read: reads) { + final GATKSAMRecord 
correctedRead = correctRead(read); + if (trimLowQualityBases) + correctedReads.add(ReadClipper.hardClipLowQualEnds(correctedRead, minTailQuality)); + else + correctedReads.add(correctedRead); + } + if (debug) { + logger.info("Number of corrected bases:"+readErrorCorrectionStats.numBasesCorrected); + logger.info("Number of corrected reads:"+readErrorCorrectionStats.numReadsCorrected); + logger.info("Number of skipped reads:"+readErrorCorrectionStats.numReadsUncorrected); + logger.info("Number of solid kmers:"+readErrorCorrectionStats.numSolidKmers); + logger.info("Number of corrected kmers:"+readErrorCorrectionStats.numCorrectedKmers); + logger.info("Number of uncorrectable kmers:"+readErrorCorrectionStats.numUncorrectableKmers); + } + } + return correctedReads; + } + + + /** + * Do actual read correction based on k-mer map. First, loop through stored k-mers to get a list of possible corrections + * for each position in the read. Then correct read based on all possible consistent corrections. + * @param inputRead Read to correct + * @return Corrected read (can be same reference as input if doInplaceErrorCorrection is set) + */ + @Requires("inputRead != null") + private GATKSAMRecord correctRead(final GATKSAMRecord inputRead) { + // no support for reduced reads (which shouldn't need to be error-corrected anyway!) 
+ if (inputRead.isReducedRead()) + return inputRead; + + // do actual correction + boolean corrected = false; + final byte[] correctedBases = inputRead.getReadBases(); + final byte[] correctedQuals = inputRead.getBaseQualities(); + + // array to store list of possible corrections for read + final CorrectionSet correctionSet = buildCorrectionMap(correctedBases); + + for (int offset = 0; offset < correctedBases.length; offset++) { + final Byte b = correctionSet.getConsensusCorrection(offset); + if (b != null && b != correctedBases[offset]) { + correctedBases[offset] = b; + correctedQuals[offset] = qualityOfCorrectedBases; + corrected = true; + } + readErrorCorrectionStats.numBasesCorrected++; + } + + if (corrected) { + readErrorCorrectionStats.numReadsCorrected++; + if (doInplaceErrorCorrection) { + inputRead.setReadBases(correctedBases); + inputRead.setBaseQualities(correctedQuals); + return inputRead; + } + else { + GATKSAMRecord correctedRead = new GATKSAMRecord(inputRead); + + // do the actual correction + // todo - do we need to clone anything else from read? + correctedRead.setBaseQualities(inputRead.getBaseQualities()); + correctedRead.setIsStrandless(inputRead.isStrandless()); + correctedRead.setReadBases(inputRead.getReadBases()); + correctedRead.setReadString(inputRead.getReadString()); + correctedRead.setReadGroup(inputRead.getReadGroup()); + return correctedRead; + } + } + else { + readErrorCorrectionStats.numReadsUncorrected++; + return inputRead; + } + } + + /** + * Build correction map for each of the bases in read. + * For each of the constituent kmers in read: + * a) See whether the kmer has been mapped to a corrected kmer. + * b) If so, get list of differing positions and corresponding bases. + * c) Add then list of new bases to index in correction list. + * Correction list is of read size, and holds a list of bases to correct. + * @param correctedBases Bases to attempt to correct + * @return CorrectionSet object. 
+ */ + @Requires("correctedBases != null") + private CorrectionSet buildCorrectionMap(final byte[] correctedBases) { + // array to store list of possible corrections for read + final CorrectionSet correctionSet = new CorrectionSet(correctedBases.length); + + for (int offset = 0; offset <= correctedBases.length-kmerLength; offset++ ) { + final Kmer kmer = new Kmer(correctedBases,offset,kmerLength); + final Kmer newKmer = kmerCorrectionMap.get(kmer); + if (newKmer != null && !newKmer.equals(kmer)){ + final Pair differingPositions = kmerDifferingBases.get(kmer); + final int[] differingIndeces = differingPositions.first; + final byte[] differingBases = differingPositions.second; + + for (int k=0; k < differingIndeces.length; k++) { + // get list of differing positions for corrected kmer + // for each of these, add correction candidate to correction set + correctionSet.add(offset + differingIndeces[k],differingBases[k]); + } + } + } + return correctionSet; + } + + + /** + * Top-level entry point that adds a collection of reads to our kmer list. + * For each read in list, its constituent kmers will be logged in our kmer table. + * @param reads + */ + @Requires("reads != null") + public void addReadsToKmers(final Collection reads) { + for (final GATKSAMRecord read: reads) + addReadKmers(read); + + if (debug) + for ( final KMerCounter.CountedKmer countedKmer: countsByKMer.getCountedKmers() ) + logger.info(String.format("%s\t%d\n", countedKmer.kmer, countedKmer.count)); + } + + + /** + * For each kmer we've seen, do the following: + * a) If kmer count > threshold1, this kmer is good, so correction map will be to itself. + * b) If kmer count <= threshold2, this kmer is bad. + * In that case, loop through all other kmers. If kmer is good, compute distance, and get minimal distance. + * If such distance is < some threshold, map to this kmer, and record differing positions and bases. 
+ * + */ + private void computeKmerCorrectionMap() { + for (final KMerCounter.CountedKmer storedKmer : countsByKMer.getCountedKmers()) { + if (storedKmer.getCount() >= minObservationsForKmerToBeSolid) { + // this kmer is good: map to itself + kmerCorrectionMap.put(storedKmer.getKmer(),storedKmer.getKmer()); + kmerDifferingBases.put(storedKmer.getKmer(),new Pair<>(new int[0],new byte[0])); // dummy empty array + readErrorCorrectionStats.numSolidKmers++; + } + else if (storedKmer.getCount() <= maxObservationsForKmerToBeCorrectable) { + // loop now thru all other kmers to find nearest neighbor + final Pair> nearestNeighbor = findNearestNeighbor(storedKmer.getKmer(),countsByKMer,maxMismatchesToCorrect); + + // check if nearest neighbor lies in a close vicinity. If so, log the new bases and the correction map + if (nearestNeighbor != null) { // ok, found close neighbor + kmerCorrectionMap.put(storedKmer.getKmer(), nearestNeighbor.first); + kmerDifferingBases.put(storedKmer.getKmer(), nearestNeighbor.second); + readErrorCorrectionStats.numCorrectedKmers++; +// if (debug) +// logger.info("Original kmer:"+storedKmer + "\tCorrected kmer:"+nearestNeighbor.first+"\tDistance:"+dist); + } + else + readErrorCorrectionStats.numUncorrectableKmers++; + + } + } + } + + /** + * Finds nearest neighbor of a given k-mer, among a list of counted K-mers, up to a given distance. + * If many k-mers share same closest distance, an arbitrary k-mer is picked + * @param kmer K-mer of interest + * @param countsByKMer KMerCounter storing set of counted k-mers (may include kmer of interest) + * @param maxDistance Maximum distance to search + * @return Pair of values: closest K-mer in Hamming distance and list of differing bases. 
+ * If no neighbor can be found up to given distance, returns null + */ + @Requires({"kmer != null", "countsByKMer != null","maxDistance >= 1"}) + private Pair> findNearestNeighbor(final Kmer kmer, + final KMerCounter countsByKMer, + final int maxDistance) { + int minimumDistance = Integer.MAX_VALUE; + Kmer closestKmer = null; + + final int[] differingIndeces = new int[maxDistance+1]; + final byte[] differingBases = new byte[maxDistance+1]; + + final int[] closestDifferingIndices = new int[maxDistance+1]; + final byte[] closestDifferingBases = new byte[maxDistance+1]; + + for (final KMerCounter.CountedKmer candidateKmer : countsByKMer.getCountedKmers()) { + // skip if candidate set includes test kmer + if (candidateKmer.getKmer().equals(kmer)) + continue; + + final int hammingDistance = kmer.getDifferingPositions(candidateKmer.getKmer(), maxDistance, differingIndeces, differingBases); + if (hammingDistance < 0) // can't compare kmer? skip + continue; + + if (hammingDistance < minimumDistance) { + minimumDistance = hammingDistance; + closestKmer = candidateKmer.getKmer(); + System.arraycopy(differingBases,0,closestDifferingBases,0,differingBases.length); + System.arraycopy(differingIndeces,0,closestDifferingIndices,0,differingIndeces.length); + } + } + return new Pair<>(closestKmer, new Pair<>(closestDifferingIndices,closestDifferingBases)); + } + + + /** + * experimental function to compute max homopolymer length in a given reference context + * @param fullReferenceWithPadding Reference context of interest + * @return Max homopolymer length in region + */ + @Requires("fullReferenceWithPadding != null") + private static int computeMaxHLen(final byte[] fullReferenceWithPadding) { + + int leftRun = 1; + int maxRun = 1; + for ( int i = 1; i < fullReferenceWithPadding.length; i++) { + if ( fullReferenceWithPadding[i] == fullReferenceWithPadding[i-1] ) + leftRun++; + else + leftRun = 1; + } + if (leftRun > maxRun) + maxRun = leftRun; + + + return maxRun; + } + + private 
static final class ReadErrorCorrectionStats { + public int numReadsCorrected; + public int numReadsUncorrected; + public int numBasesCorrected; + public int numSolidKmers; + public int numUncorrectableKmers; + public int numCorrectedKmers; + } + + /** + * Wrapper utility class that holds, for each position in read, a list of bytes representing candidate corrections. + * So, a read ACAGT where the middle A has found to be errorful might look like: + * 0: {} + * 1: {} + * 2: {'C','C','C'} + * 3: {} + * 4: {} + * + * It's up to the method getConsensusCorrection() to decide how to use the correction sets for each position. + * By default, only strict consensus is allowed right now. + * + */ + protected static class CorrectionSet { + private final int size; + private ArrayList> corrections; + + /** + * Main class constructor. + * @param size Size of correction set, needs to be set equal to the read being corrected + */ + public CorrectionSet(final int size) { + this.size = size; + corrections = new ArrayList<>(size); + for (int k=0; k < size; k++) + corrections.add(k,new ArrayList()); + } + + /** + * Add a base to this correction set at a particular offset, measured from the start of the read + * @param offset Offset from start of read + * @param base base to be added to list of corrections at this offset + */ + public void add(final int offset, final byte base) { + if (offset >= size || offset < 0) + throw new IllegalStateException("Bad entry into CorrectionSet: offset > size"); + if (!BaseUtils.isRegularBase(base)) + return; // no irregular base correction + + final List storedBytes = corrections.get(offset); + storedBytes.add(base); + } + + /** + * Get list of corrections for a particular offset + * @param offset Offset of interest + * @return List of bases representing possible corrections at this offset + */ + public List get(final int offset) { + if (offset >= size || offset < 0) + throw new IllegalArgumentException("Illegal call of CorrectionSet.get(): offset 
must be < size"); + return corrections.get(offset); + } + + /** + * Get consensus correction for a particular offset. In this implementation, it just boils down to seeing if + * byte list associated with offset has identical values. If so, return this base, otherwise return null. + * @param offset + * @return Consensus base, or null if no consensus possible. + */ + public Byte getConsensusCorrection(final int offset) { + if (offset >= size || offset < 0) + throw new IllegalArgumentException("Illegal call of CorrectionSet.getConsensusCorrection(): offset must be < size"); + final List storedBytes = corrections.get(offset); + if (storedBytes.isEmpty()) + return null; + + // todo - is there a cheaper/nicer way to compare if all elements in list are identical?? + final byte lastBase = storedBytes.remove(storedBytes.size()-1); + for (final Byte b: storedBytes) { + // strict correction rule: all bases must match + if (b != lastBase) + return null; + } + + // all bytes then are equal: + return lastBase; + + } + + + + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java index 989c38628..116c987a6 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java @@ -47,13 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import java.util.*; public class KmerUnitTest extends BaseTest { @DataProvider(name = "KMerCreationData") @@ -130,4 +129,40 @@ public class KmerUnitTest extends 
BaseTest { } } } + + @Test + public void testDifferingPositions() { + final String bases = "ACGTCAGACGTACGTTTGACGTCAGACGTACGT"; + final Kmer baseKmer = new Kmer(bases.getBytes()); + + + final int NUM_TEST_CASES = 30; + + for (int test = 0; test < NUM_TEST_CASES; test++) { + + final int numBasesToChange = test % bases.length(); + + // changes numBasesToChange bases - spread regularly through read string + final int step = (numBasesToChange > 0?Math.min(bases.length() / numBasesToChange,1) : 1); + + final byte[] newBases = bases.getBytes().clone(); + int actualChangedBases =0; // could be different from numBasesToChange due to roundoff + for (int idx=0; idx < numBasesToChange; idx+=step) { + // now change given positions + newBases[idx] = (newBases[idx] == (byte)'A'? (byte)'T':(byte)'A'); + actualChangedBases++; + } + + // compute changed positions + final int[] differingIndices = new int[newBases.length]; + final byte[] differingBases = new byte[newBases.length]; + final int numDiffs = baseKmer.getDifferingPositions(new Kmer(newBases),newBases.length,differingIndices,differingBases); + Assert.assertEquals(numDiffs,actualChangedBases); + for (int k=0; k < numDiffs; k++) { + final int idx = differingIndices[k]; + Assert.assertTrue(newBases[idx] != bases.getBytes()[idx]); + Assert.assertEquals(differingBases[idx],newBases[idx]); + } + } + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java index 74361de1b..9f6013235 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java @@ -227,7 +227,7 @@ public class LocalAssemblyEngineUnitTest extends BaseTest { activeRegion.addAll(reads); final LocalAssemblyEngine engine = 
createAssembler(assembler); // logger.warn("Assembling " + activeRegion + " with " + engine); - return engine.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.emptyList()); + return engine.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.emptyList(), null); } @DataProvider(name = "SimpleAssemblyTestData") diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java new file mode 100644 index 000000000..e201b24fc --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java @@ -0,0 +1,190 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. 
LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. 
The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ReadErrorCorrectorUnitTest { + private static final boolean debug = true; + final String refChunk = "GCATAAACATGGCTCACTGC"; + final String refChunkHard = "AGCCTTGAACTCCTGGGCTCAAGTGATCCTCCTGCCTCAGTTTCCCATGTAGCTGGGACCACAGGTGGGGGCTCCACCCCTGGCTGATTTTTTTTTTTTTTTTTTTTTGAGATAGGGT"; + + @Test + public void TestBasicCorrectionSet() { + + final byte[] trueBases = refChunk.getBytes(); + final int numCorrections = 50; + final ReadErrorCorrector.CorrectionSet correctionSet = new ReadErrorCorrector.CorrectionSet(trueBases.length); + + int offset = 2; + for (int k=0; k < numCorrections; k++) { + // introduce one correction at a random offset in array. To make testing easier, we will replicate corrrection + final byte base = trueBases[offset]; + correctionSet.add(offset, base); + // skip to some other offset + offset += 7; + if (offset >= trueBases.length) + offset -= trueBases.length; + } + + for (int k=0; k < trueBases.length; k++) { + final byte corr = correctionSet.getConsensusCorrection(k); + Assert.assertEquals(corr, trueBases[k]); + } + } + + @Test + public void TestExtendedCorrectionSet() { + + final byte[] trueBases = refChunk.getBytes(); + final int numCorrections = 50; + final ReadErrorCorrector.CorrectionSet correctionSet = new ReadErrorCorrector.CorrectionSet(trueBases.length); + + for (int offset=0; offset < trueBases.length; offset++) { + // insert k corrections at offset k and make sure we get exactly k bases back + for (int k=0; k < offset; k++) + correctionSet.add(offset,trueBases[offset]); + + } + + for (int offset=0; offset < trueBases.length; offset++) { + 
Assert.assertEquals(correctionSet.get(offset).size(),offset); + } + } + + @Test + public void TestAddReadsToKmers() { + final int NUM_GOOD_READS = 500; + + final String bases = "AAAAAAAAAAAAAAA"; + final int READ_LENGTH = bases.length(); + final int kmerLengthForReadErrorCorrection = READ_LENGTH; + final List finalizedReadList = new ArrayList(NUM_GOOD_READS); + int offset = 0; + final byte[] quals = new byte[READ_LENGTH]; + + Arrays.fill(quals,(byte)30); + + for (int k=0; k < NUM_GOOD_READS; k++) { + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases.getBytes(), quals,READ_LENGTH+"M"); + finalizedReadList.add(read); + } + + ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + + // special trivial case: kmer length is equal to read length. + // K-mer counter should hold then exactly one kmer + Assert.assertEquals(readErrorCorrector.countsByKMer.getCountedKmers().size(), 1); + for (final KMerCounter.CountedKmer kmer : readErrorCorrector.countsByKMer.getCountedKmers()) { + Assert.assertTrue(Arrays.equals( kmer.getKmer().bases(),bases.getBytes())); + Assert.assertEquals(kmer.getCount(),NUM_GOOD_READS); + } + + // special case 2: kmers are all the same but length < read length. 
+ // Each kmer is added then readLength-kmerLength+1 times + final int KMER_LENGTH = 10; + readErrorCorrector = new ReadErrorCorrector(KMER_LENGTH,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + Assert.assertEquals(readErrorCorrector.countsByKMer.getCountedKmers().size(), 1); + for (final KMerCounter.CountedKmer kmer : readErrorCorrector.countsByKMer.getCountedKmers()) { + Assert.assertEquals(kmer.getCount(),NUM_GOOD_READS*(READ_LENGTH-KMER_LENGTH+1)); + } + + } + @Test + public void TestBasicErrorCorrection() { + final int NUM_GOOD_READS = 500; + final int NUM_BAD_READS = 10; + final int READ_LENGTH = 15; + final int kmerLengthForReadErrorCorrection = 10; + final List finalizedReadList = new ArrayList(NUM_GOOD_READS); + int offset = 0; + final byte[] quals = new byte[READ_LENGTH]; + + Arrays.fill(quals,(byte)30); + + for (int k=0; k < NUM_GOOD_READS; k++) { + final byte[] bases = Arrays.copyOfRange(refChunk.getBytes(),offset,offset+READ_LENGTH); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals,READ_LENGTH+"M"); + finalizedReadList.add(read); + offset++; + if (offset >= refChunk.length()-READ_LENGTH) + offset = 0; + } + offset = 2; + // coverage profile is now perfectly triangular with "good" bases. Inject now bad bases with errors in them. 
+ for (int k=0; k < NUM_BAD_READS; k++) { + final byte[] bases = finalizedReadList.get(k).getReadBases().clone(); + bases[offset] = 'N'; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, READ_LENGTH + "M"); + finalizedReadList.add(read); + offset += 7; + if (offset >= READ_LENGTH) + offset = 4; // just some randomly circulating offset for error position + } + + // now correct all reads + final ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + readErrorCorrector.correctReads(finalizedReadList); + + // check that corrected reads have exactly same content as original reads + for (int k=0; k < NUM_BAD_READS; k++) { + final byte[] badBases = finalizedReadList.get(k).getReadBases(); + final byte[] originalBases = finalizedReadList.get(k).getReadBases(); + Assert.assertTrue(Arrays.equals(badBases,originalBases)); + } + } +} From dadcfe296dff64dae226aad1da57da2c512c3870 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Wed, 5 Jun 2013 14:26:23 -0400 Subject: [PATCH 49/99] Reworking of the dangling tails merging code. We now run Smith-Waterman on the dangling tail against the corresponding reference tail. If we can generate a reasonable, low entropy alignment then we trigger the merge to the reference path; otherwise we abort. Also, we put in a check for low-complexity of graphs and don't let those pass through. Added tests for this implementation that checks exact SW results and correct edges added. 
--- .../haplotypecaller/graphs/BaseGraph.java | 18 ++ .../haplotypecaller/graphs/GraphUtils.java | 10 +- .../walkers/haplotypecaller/graphs/Path.java | 3 +- .../readthreading/ReadThreadingAssembler.java | 28 ++- .../readthreading/ReadThreadingGraph.java | 207 ++++++++++++++---- .../graphs/BaseGraphUnitTest.java | 15 ++ .../ReadThreadingAssemblerUnitTest.java | 3 +- .../ReadThreadingGraphUnitTest.java | 76 +++++++ .../sting/utils/sam/AlignmentUtils.java | 17 ++ .../smithwaterman/SWPairwiseAlignment.java | 15 ++ .../utils/sam/AlignmentUtilsUnitTest.java | 7 + 11 files changed, 339 insertions(+), 60 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java index c963fb6e5..70ef539f3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java @@ -676,6 +676,24 @@ public class BaseGraph extends Default '}'; } + /** + * The base sequence for the given path. + * Note, this assumes that the path does not start with a source node. 
+ * + * @param path the list of vertexes that make up the path + * @return non-null sequence of bases corresponding to the given path + */ + @Ensures({"result != null"}) + public byte[] getBasesForPath(final List path) { + if ( path == null ) throw new IllegalArgumentException("Path cannot be null"); + + final StringBuffer sb = new StringBuffer(); + for ( final DeBruijnVertex v : path ) + sb.append((char)v.getSuffix()); + + return sb.toString().getBytes(); + } + /** * Get the set of vertices within distance edges of source, regardless of edge direction * diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java index 4aa6047a9..73a1daa3e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java @@ -171,7 +171,15 @@ final public class GraphUtils { return foundDup ? 
null : new PrimitivePair.Int(longestPos, length); } - private static int longestSuffixMatch(final byte[] seq, final byte[] kmer, final int seqStart) { + /** + * calculates the longest suffix match between a sequence and a smaller kmer + * + * @param seq the (reference) sequence + * @param kmer the smaller kmer sequence + * @param seqStart the index (inclusive) on seq to start looking backwards from + * @return the longest matching suffix + */ + public static int longestSuffixMatch(final byte[] seq, final byte[] kmer, final int seqStart) { for ( int len = 1; len <= kmer.length; len++ ) { final int seqI = seqStart - len + 1; final int kmerI = kmer.length - len; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java index a07b98bb6..2e84e1d22 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; -import com.google.java.contract.Requires; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; @@ -92,7 +91,7 @@ public class Path { /** * Create a new Path containing no edges and starting at initialVertex * @param initialVertex the starting vertex of the path - * @param graph the graph this path with follow through + * @param graph the graph this path will follow through */ public Path(final T initialVertex, final BaseGraph graph) { if ( initialVertex == null ) throw new IllegalArgumentException("initialVertex cannot be null"); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index 0887929ab..f4290f2bb 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -55,7 +55,6 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.io.File; import java.util.Arrays; -import java.util.Collections; import java.util.LinkedList; import java.util.List; @@ -89,7 +88,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { } public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { - this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, false); + this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, true); } /** for testing purposes */ @@ -103,7 +102,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { // first, try using the requested kmer sizes for ( final int kmerSize : kmerSizes ) { - final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes); + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes, dontIncreaseKmerSizesForCycles); if ( graph != null ) graphs.add(graph); } @@ -113,7 +112,8 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { int kmerSize = MathUtils.arrayMaxInt(kmerSizes) + KMER_SIZE_ITERATION_INCREASE; int numIterations = 1; while ( graphs.isEmpty() && numIterations <= MAX_KMER_ITERATIONS_TO_ATTEMPT ) { - final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes); + // on the last attempt we will allow low complexity graphs + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes, numIterations == MAX_KMER_ITERATIONS_TO_ATTEMPT); if ( graph != null ) graphs.add(graph); kmerSize 
+= KMER_SIZE_ITERATION_INCREASE; @@ -131,9 +131,14 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { * @param refHaplotype reference haplotype * @param kmerSize kmer size * @param activeAlleleHaplotypes the GGA haplotypes to inject into the graph - * @return sequence graph or null if one could not be created (e.g. because it contains cycles or too many paths) + * @param allowLowComplexityGraphs if true, do not check for low-complexity graphs + * @return sequence graph or null if one could not be created (e.g. because it contains cycles or too many paths or is low complexity) */ - protected SeqGraph createGraph(final List reads, final Haplotype refHaplotype, final int kmerSize, final List activeAlleleHaplotypes) { + protected SeqGraph createGraph(final List reads, + final Haplotype refHaplotype, + final int kmerSize, + final List activeAlleleHaplotypes, + final boolean allowLowComplexityGraphs) { final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly); // add the reference sequence to the graph @@ -157,7 +162,13 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { // sanity check: make sure there are no cycles in the graph if ( rtgraph.hasCycles() ) { - if ( debug ) logger.info("Not using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler because it contains a cycle"); + if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it contains a cycle"); + return null; + } + + // sanity check: make sure the graph had enough complexity with the given kmer + if ( ! 
allowLowComplexityGraphs && rtgraph.isLowComplexity() ) { + if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it does not produce a graph with enough complexity"); return null; } @@ -169,8 +180,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { rtgraph.pruneLowWeightChains(pruneFactor); // look at all chains in the graph that terminate in a non-ref node (dangling sinks) and see if - // we can recover them by merging some N bases from the chain back into the reference uniquely, for - // N < kmerSize + // we can recover them by merging some N bases from the chain back into the reference if ( recoverDanglingTails ) rtgraph.recoverDanglingTails(); // remove all heading and trailing paths diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index ab6b17c35..8d8cb83f6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -46,14 +46,19 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.KMerCounter; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.collections.Pair; -import org.broadinstitute.sting.utils.collections.PrimitivePair; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; import 
org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.smithwaterman.SWPairwiseAlignment; +import org.broadinstitute.sting.utils.smithwaterman.SmithWaterman; import org.jgrapht.EdgeFactory; import org.jgrapht.alg.CycleDetector; @@ -80,9 +85,6 @@ public class ReadThreadingGraph extends BaseGraph their corresponding vertex in the graph */ - private Map uniqueKmers = new LinkedHashMap(); + private Map uniqueKmers = new LinkedHashMap<>(); /** * @@ -113,8 +115,6 @@ public class ReadThreadingGraph extends BaseGraph danglingPath, referencePath; + final byte[] danglingPathString, referencePathString; + final Cigar cigar; + + public DanglingTailMergeResult(final List danglingPath, + final List referencePath, + final byte[] danglingPathString, + final byte[] referencePathString, + final Cigar cigar) { + this.danglingPath = danglingPath; + this.referencePath = referencePath; + this.danglingPathString = danglingPathString; + this.referencePathString = referencePathString; + this.cigar = cigar; + } + } + + /** + * Attempt to attach vertex with out-degree == 0 to the graph + * * @param vertex the vertex to recover + * @return 1 if we successfully recovered the vertex and 0 otherwise */ protected int recoverDanglingChain(final MultiDeBruijnVertex vertex) { if ( outDegreeOf(vertex) != 0 ) throw new IllegalStateException("Attempting to recover a dangling tail for " + vertex + " but it has out-degree > 0"); - final byte[] kmer = vertex.getSequence(); - if ( ! nonUniqueKmers.contains(new Kmer(kmer)) ) { - // don't attempt to fix non-unique kmers! 
- final MultiDeBruijnVertex uniqueMergePoint = danglingTailMergePoint(kmer); - if ( uniqueMergePoint != null ) { - addEdge(vertex, uniqueMergePoint, new MultiSampleEdge(false, 1)); - return 1; - } - } + // generate the CIGAR string from Smith-Waterman between the dangling tail and reference paths + final DanglingTailMergeResult danglingTailMergeResult = generateCigarAgainstReferencePath(vertex); - return 0; + // if the CIGAR is too complex (or couldn't be computed) then we do not allow the merge into the reference path + if ( danglingTailMergeResult == null || ! cigarIsOkayToMerge(danglingTailMergeResult.cigar) ) + return 0; + + // merge + return mergeDanglingTail(danglingTailMergeResult); } /** - * Find a unique merge point for kmer in the reference sequence - * @param kmer the full kmer of the dangling tail - * @return a vertex appropriate to merge kmer into, or null if none could be found + * Determine whether the provided cigar is okay to merge into the reference path + * + * @param cigar the cigar to analyze + * @return true if it's okay to merge, false otherwise */ - private MultiDeBruijnVertex danglingTailMergePoint(final byte[] kmer) { - final PrimitivePair.Int endAndLength = GraphUtils.findLongestUniqueSuffixMatch(refSeq, kmer); - if ( endAndLength != null && endAndLength.second >= MIN_MATCH_LENGTH_TO_RECOVER_DANGLING_TAIL && endAndLength.first + 1 < refKmers.length) { - final int len = endAndLength.second; - final MultiDeBruijnVertex mergePoint = refKmers[endAndLength.first + 1]; -// logger.info("recoverDanglingChain of kmer " + new String(kmer) + " merged to " + mergePoint + " with match size " + len); - final Set nonUniquesAtLength = determineKmerSizeAndNonUniques(len, len).nonUniques; - final Kmer matchedKmer = new Kmer(kmer, kmer.length - len, len); - if ( nonUniquesAtLength.contains(matchedKmer) ) { -// logger.info("Rejecting merge " + new String(kmer) + " because match kmer " + matchedKmer + " isn't unique across all reads"); - return null; - } else 
{ - return mergePoint; - } + protected boolean cigarIsOkayToMerge(final Cigar cigar) { + + final List elements = cigar.getCigarElements(); + + // don't allow more than a couple of different ops + if ( elements.size() > 3 ) + return false; + + // the last element must be an M + if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.M ) + return false; + + // TODO -- do we want to check whether the Ms mismatch too much also? + + return true; + } + + /** + * Actually merge the dangling tail if possible + * + * @param danglingTailMergeResult the result from generating a Cigar for the dangling tail against the reference + * @return 1 if merge was successful, 0 otherwise + */ + protected int mergeDanglingTail(final DanglingTailMergeResult danglingTailMergeResult) { + + final List elements = danglingTailMergeResult.cigar.getCigarElements(); + final CigarElement lastElement = elements.get(elements.size() - 1); + if ( lastElement.getOperator() != CigarOperator.M ) + throw new IllegalArgumentException("The last Cigar element must be an M"); + + final int lastRefIndex = danglingTailMergeResult.cigar.getReferenceLength() - 1; + final int matchingSuffix = Math.min(GraphUtils.longestSuffixMatch(danglingTailMergeResult.referencePathString, danglingTailMergeResult.danglingPathString, lastRefIndex), lastElement.getLength()); + if ( matchingSuffix == 0 ) + return 0; + + final int altIndexToMerge = Math.max(danglingTailMergeResult.cigar.getReadLength() - matchingSuffix - 1, 0); + final int refIndexToMerge = lastRefIndex - matchingSuffix + 1; + addEdge(danglingTailMergeResult.danglingPath.get(altIndexToMerge), danglingTailMergeResult.referencePath.get(refIndexToMerge), new MultiSampleEdge(false, 1)); + return 1; + } + + /** + * Generates the CIGAR string from the Smith-Waterman alignment of the dangling path (where the + * provided vertex is the sink) and the reference path. 
+ * + * @param vertex the sink of the dangling tail + * @return a SmithWaterman object which can be null if no proper alignment could be generated + */ + protected DanglingTailMergeResult generateCigarAgainstReferencePath(final MultiDeBruijnVertex vertex) { + + // find the lowest common ancestor path between vertex and the reference sink if available + final List altPath = findPathToLowestCommonAncestorOfReference(vertex); + if ( altPath == null ) + return null; + + // now get the reference path from the LCA + final List refPath = getReferencePath(altPath.get(0)); + + // create the Smith-Waterman strings to use + final byte[] refBases = getBasesForPath(refPath); + final byte[] altBases = getBasesForPath(altPath); + + // run Smith-Waterman to determine the best alignment (and remove trailing deletions since they aren't interesting) + final SmithWaterman alignment = new SWPairwiseAlignment(refBases, altBases, SWPairwiseAlignment.OVERHANG_STRATEGY.INDEL); + return new DanglingTailMergeResult(altPath, refPath, altBases, refBases, AlignmentUtils.removeTrailingDeletions(alignment.getCigar())); + } + + /** + * Finds the path upwards in the graph from this vertex to the reference sequence, including the lowest common ancestor vertex + * + * @param vertex the original vertex + * @return the path if it can be determined or null if this vertex either doesn't merge onto the reference path or + * has an ancestor with multiple incoming edges before hitting the reference path + */ + protected List findPathToLowestCommonAncestorOfReference(final MultiDeBruijnVertex vertex) { + final LinkedList path = new LinkedList<>(); + + MultiDeBruijnVertex v = vertex; + while ( ! isReferenceNode(v) && inDegreeOf(v) == 1 ) { + path.addFirst(v); + v = getEdgeSource(incomingEdgeOf(v)); + } + path.addFirst(v); + + return isReferenceNode(v) ? 
path : null; + } + + /** + * Finds the path downwards in the graph from this vertex to the reference sink, including this vertex + * + * @param start the reference vertex to start from + * @return the path (non-null, non-empty) + */ + protected List getReferencePath(final MultiDeBruijnVertex start) { + if ( ! isReferenceNode(start) ) throw new IllegalArgumentException("Cannot construct the reference path from a vertex that is not on that path"); + + final List path = new ArrayList<>(); + + MultiDeBruijnVertex v = start; + while ( v != null ) { + path.add(v); + v = getNextReferenceVertex(v); } - return null; + return path; } /** @@ -330,6 +432,16 @@ public class ReadThreadingGraph extends BaseGraph(this).detectCycles(); } + /** + * Does the graph not have enough complexity? We define low complexity as a situation where the number + * of non-unique kmers is more than 20% of the total number of kmers. + * + * @return true if the graph has low complexity, false otherwise + */ + public boolean isLowComplexity() { + return nonUniqueKmers.size() * 4 > uniqueKmers.size(); + } + public void recoverDanglingTails() { if ( ! 
alreadyBuilt ) throw new IllegalStateException("recoverDanglingTails requires the graph be already built"); @@ -341,7 +453,8 @@ public class ReadThreadingGraph extends BaseGraph vertexes = new ArrayList<>(); + for ( int i = 0; i <= testString.length() - kmerSize; i++ ) { + vertexes.add(new DeBruijnVertex(testString.substring(i, i + kmerSize))); + } + + final String result = new String(new DeBruijnGraph().getBasesForPath(vertexes)); + Assert.assertEquals(result, testString.substring(kmerSize - 1)); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java index 3f10fc72c..8269b9c20 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java @@ -83,7 +83,8 @@ public class ReadThreadingAssemblerUnitTest extends BaseTest { } public SeqGraph assemble() { - assembler.removePathsNotConnectedToRef = false; // need to pass some of the tests + assembler.removePathsNotConnectedToRef = false; // needed to pass some of the tests + assembler.setRecoverDanglingTails(false); // needed to pass some of the tests assembler.setDebugGraphTransformations(true); final SeqGraph graph = assembler.assemble(reads, refHaplotype, Collections.emptyList()).get(0); if ( DEBUG ) graph.printGraph(new File("test.dot"), 0); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java index 67ee52734..ed91cccb3 100644 --- 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java @@ -53,6 +53,7 @@ import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; @@ -201,6 +202,81 @@ public class ReadThreadingGraphUnitTest extends BaseTest { Assert.assertEquals(pathFinder.getKBestPaths(graph, length, graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex()).size(), 1); } + @DataProvider(name = "DanglingTails") + public Object[][] makeDanglingTailsData() { + List tests = new ArrayList(); + + // add 1M to the expected CIGAR because it includes the previous (common) base too + tests.add(new Object[]{"AAAAAAAAAA", "CAAA", "5M", true, 3}); // incomplete haplotype + tests.add(new Object[]{"AAAAAAAAAA", "CAAAAAAAAAA", "1M1I10M", true, 10}); // insertion + tests.add(new Object[]{"CCAAAAAAAAAA", "AAAAAAAAAA", "1M2D10M", true, 10}); // deletion + tests.add(new Object[]{"AAAAAAAA", "CAAAAAAA", "9M", true, 7}); // 1 snp + tests.add(new Object[]{"AAAAAAAA", "CAAGATAA", "9M", true, 2}); // several snps + tests.add(new Object[]{"AAAAA", "C", "1M4D1M", true, -1}); // funky SW alignment + tests.add(new Object[]{"AAAAA", "CA", "1M3D2M", true, 1}); // very little data + tests.add(new Object[]{"AAAAAAA", "CAAAAAC", "8M", true, -1}); // ends in mismatch + tests.add(new Object[]{"AAAAAA", "CGAAAACGAA", "1M2I4M2I2M", false, 0}); // alignment is too complex + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "DanglingTails", enabled = !DEBUG) + public void testDanglingTails(final String refEnd, + final String altEnd, + final String cigar, + final boolean cigarIsGood, + 
final int mergePointDistanceFromSink) { + + final int kmerSize = 15; + + // construct the haplotypes + final String commonPrefix = "AAAAAAAAAACCCCCCCCCCGGGGGGGGGGTTTTTTTTTT"; + final String ref = commonPrefix + refEnd; + final String alt = commonPrefix + altEnd; + + // create the graph and populate it + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize); + rtgraph.addSequence("ref", ref.getBytes(), null, true); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(alt.getBytes(), Utils.dupBytes((byte) 30, alt.length()), alt.length() + "M"); + rtgraph.addRead(read); + rtgraph.buildGraphIfNecessary(); + + // confirm that we have just a single dangling tail + MultiDeBruijnVertex altSink = null; + for ( final MultiDeBruijnVertex v : rtgraph.vertexSet() ) { + if ( rtgraph.isSink(v) && !rtgraph.isReferenceNode(v) ) { + Assert.assertTrue(altSink == null, "We found more than one non-reference sink"); + altSink = v; + } + } + + Assert.assertTrue(altSink != null, "We did not find a non-reference sink"); + + // confirm that the SW alignment agrees with our expectations + final ReadThreadingGraph.DanglingTailMergeResult result = rtgraph.generateCigarAgainstReferencePath(altSink); + Assert.assertTrue(cigar.equals(result.cigar.toString()), "SW generated cigar = " + result.cigar.toString()); + + // confirm that the goodness of the cigar agrees with our expectations + Assert.assertEquals(rtgraph.cigarIsOkayToMerge(result.cigar), cigarIsGood); + + // confirm that the tail merging works as expected + if ( cigarIsGood ) { + final int mergeResult = rtgraph.mergeDanglingTail(result); + Assert.assertTrue(mergeResult == 1 || mergePointDistanceFromSink == -1); + + // confirm that we created the appropriate edge + if ( mergePointDistanceFromSink >= 0 ) { + MultiDeBruijnVertex v = altSink; + for ( int i = 0; i < mergePointDistanceFromSink; i++ ) { + if ( rtgraph.inDegreeOf(v) != 1 ) + Assert.fail("Encountered vertex with multiple sources"); + v = 
rtgraph.getEdgeSource(rtgraph.incomingEdgeOf(v)); + } + Assert.assertTrue(rtgraph.outDegreeOf(v) > 1); + } + } + } + // TODO -- update to use determineKmerSizeAndNonUniques directly // @DataProvider(name = "KmerSizeData") diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java index fa35e3f53..762ce4858 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java @@ -800,6 +800,23 @@ public final class AlignmentUtils { return new Cigar(elements); } + /** + * Removing a trailing deletion from the incoming cigar if present + * + * @param c the cigar we want to update + * @return a non-null Cigar + */ + @Requires("c != null") + @Ensures("result != null") + public static Cigar removeTrailingDeletions(final Cigar c) { + + final List elements = c.getCigarElements(); + if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.D ) + return c; + + return new Cigar(elements.subList(0, elements.size() - 1)); + } + /** * Move the indel in a given cigar string one base to the left * diff --git a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java index 84c33d4a5..1abf9f836 100644 --- a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java +++ b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java @@ -118,6 +118,21 @@ public class SWPairwiseAlignment implements SmithWaterman { align(seq1,seq2); } + /** + * Create a new SW pairwise aligner + * + * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2) + * + * @param seq1 the first sequence we want to align + * @param seq2 the second sequence we want to align + * @param strategy the overhang 
strategy to use + */ + public SWPairwiseAlignment(final byte[] seq1, final byte[] seq2, final OVERHANG_STRATEGY strategy) { + this(SWParameterSet.ORIGINAL_DEFAULT.parameters); + overhang_strategy = strategy; + align(seq1, seq2); + } + /** * Create a new SW pairwise aligner, without actually doing any alignment yet * diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java index e7d54c460..fbf0242a3 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java @@ -1033,5 +1033,12 @@ public class AlignmentUtilsUnitTest { Assert.assertEquals(AlignmentUtils.startsOrEndsWithInsertionOrDeletion(TextCigarCodec.getSingleton().decode(cigar)), expected); } + @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true) + public void testRemoveTrailingDeletions(final String cigar, final boolean expected) { + final Cigar originalCigar = TextCigarCodec.getSingleton().decode(cigar); + final Cigar newCigar = AlignmentUtils.removeTrailingDeletions(originalCigar); + + Assert.assertEquals(originalCigar.equals(newCigar), !cigar.endsWith("D")); + } } From 2c3c680eb704348cb8e20572f5b9d2fb3a5a986c Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Wed, 5 Jun 2013 12:22:14 -0400 Subject: [PATCH 50/99] Misc changes and cleanup from all previous commits in this push. 1. By default, do not include the UG CEU callset for assessment. 2. Updated md5s that are different now with all the HC changes. 
--- ...lexAndSymbolicVariantsIntegrationTest.java | 6 +++--- .../HaplotypeCallerIntegrationTest.java | 20 +++++++++---------- ...aplotypeCallerParallelIntegrationTest.java | 2 +- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index fba294c3d..073d54ec5 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "8d7728909b1b8eb3f30f2f1583f054a8"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "d21f15a5809fe5259af41ae6774af6f1"); } private void HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "db71826dc798ff1cdf0c5d05b0ede976"); + "d4a0797c2fd4c103bf9a137633376156"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "42831d5463552911b7da9de0b4a27289"); + "a9872228d0275a30f5a1f7e070a9c9f4"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 904f15728..dbdd0afcd 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -78,12 +78,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "1b15e4647013ab2c3ce7073c420d8640"); + HCTest(CEUTRIO_BAM, "", "e9167a1bfc0fc276586788d1ce1be408"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "423be27dc2cf7fd10baf465cf93e18e2"); + HCTest(NA12878_BAM, "", "b1d46afb9659ac3b92a3d131b58924ef"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -94,7 +94,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "a28e6f14e28708283d61c1e423bbdcb1"); + "d83856b8136776bd731a8037c16b71fa"); } @Test @@ -110,7 +110,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "8344d86751b707c53b296c297eba4bfa"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "70c4476816f5d35c9978c378dbeac09b"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -147,7 +147,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "dea98f257d39fa1447a12c36a6bbf4a3"); + HCTestNearbySmallIntervals(NA12878_BAM, "", 
"947aae309ecab7cd3f17ff9810884924"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -157,14 +157,14 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void HCTestProblematicReadsModifiedInActiveRegions() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "haplotype-problem-4.bam") + " --no_cmdline_in_header -o %s -minPruning 3 -L 4:49139026-49139965"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("7cd1c5e2642ae8ddf38932aba1f51d69")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("0689d2c202849fd05617648eaf429b9a")); executeTest("HCTestProblematicReadsModifiedInActiveRegions: ", spec); } @Test public void HCTestStructuralIndels() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "AFR.structural.indels.bam") + " --no_cmdline_in_header -o %s -minPruning 6 -L 20:8187565-8187800 -L 20:18670537-18670730"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("ee55ff4c6ec1bbef88e21cc0f45d4c47")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("91717e5e271742c2c9b67223e58f1320")); executeTest("HCTestStructuralIndels: ", spec); } @@ -186,7 +186,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("4886a98bf699f4e7f4491160749ada6a")); + Arrays.asList("0124c4923d96ec0f8222b596dd4ef534")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -194,7 +194,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void 
testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("86bdd07a3ac4f6ce239c30efea8bf5ba")); + Arrays.asList("0e020dcfdf249225714f5cd86ed3869f")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } @@ -208,7 +208,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestDBSNPAnnotationWGS() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, - Arrays.asList("7b23a288a31cafca3946f14f2381e7cb")); + Arrays.asList("446a786bb539f3ec2084dd75167568aa")); executeTest("HC calling with dbSNP ID annotation on WGS intervals", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java index ff5a501cc..62e685eab 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java @@ -61,7 +61,7 @@ public class HaplotypeCallerParallelIntegrationTest extends WalkerTest { List tests = new ArrayList(); for ( final int nct : Arrays.asList(1, 2, 4) ) { - tests.add(new Object[]{nct, "c277fd65365d59b734260dd8423313bb"}); + tests.add(new Object[]{nct, "ef42a438b82681d1c0f921c57e16ff12"}); } return tests.toArray(new Object[][]{}); From 95b5f99feb89e06ac0e09ae490552b8de926007b Mon Sep 17 
00:00:00 2001 From: David Roazen Date: Wed, 5 Jun 2013 15:55:43 -0400 Subject: [PATCH 51/99] Exclude reduced reads from elimination during downsampling Problem: -Downsamplers were treating reduced reads the same as normal reads, with occasionally catastrophic results on variant calling when an entire reduced read happened to get eliminated. Solution: -Since reduced reads lack the information we need to do position-based downsampling on them, best available option for now is to simply exempt all reduced reads from elimination during downsampling. Details: -Add generic capability of exempting items from elimination to the Downsampler interface via new doNotDiscardItem() method. Default inherited version of this method exempts all reduced reads (or objects encapsulating reduced reads) from elimination. -Switch from interfaces to abstract classes to facilitate this change, and do some minor refactoring of the Downsampler interface (push implementation of some methods into the abstract classes, improve names of the confusing clear() and reset() methods). -Rewrite TAROrderedReadCache. This class was incorrectly relying on the ReservoirDownsampler to preserve the relative ordering of items in some circumstances, which was behavior not guaranteed by the API and only happened to work due to implementation details which no longer apply. Restructured this class around the assumption that the ReservoirDownsampler will not preserve relative ordering at all. -Add disclaimer to description of -dcov argument explaining that coverage targets are approximate goals that will not always be precisely met. 
-Unit tests for all individual downsamplers to verify that reduced reads are exempted from elimination --- .../arguments/GATKArgumentCollection.java | 9 +- .../sting/gatk/downsampling/Downsampler.java | 69 +++++++++--- .../downsampling/FractionalDownsampler.java | 47 ++++---- .../downsampling/LevelingDownsampler.java | 66 ++++++------ .../downsampling/PassThroughDownsampler.java | 35 +++--- .../gatk/downsampling/ReadsDownsampler.java | 6 +- .../downsampling/ReservoirDownsampler.java | 102 +++++++++++------- .../SimplePositionalDownsampler.java | 60 +++++------ .../gatk/traversals/TAROrderedReadCache.java | 92 +++++++++++----- .../locusiterator/AlignmentStateMachine.java | 9 ++ .../PerSampleReadStateManager.java | 2 +- .../locusiterator/SamplePartitioner.java | 4 +- .../sting/utils/sam/ArtificialSAMUtils.java | 25 +++++ .../FractionalDownsamplerUnitTest.java | 35 +++++- .../LevelingDownsamplerUnitTest.java | 48 ++++++++- .../ReservoirDownsamplerUnitTest.java | 45 +++++++- .../SimplePositionalDownsamplerUnitTest.java | 46 +++++++- .../TAROrderedReadCacheUnitTest.java | 50 ++++++++- 18 files changed, 545 insertions(+), 205 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java index 8d1fa4638..dc3d67283 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java @@ -125,7 +125,14 @@ public class GATKArgumentCollection { @Argument(fullName = "downsample_to_fraction", shortName = "dfrac", doc = "Fraction [0.0-1.0] of reads to downsample to", required = false) public Double downsampleFraction = null; - @Argument(fullName = "downsample_to_coverage", shortName = "dcov", doc = "Coverage [integer] to downsample to at any given locus; note that downsampled reads are randomly selected from all possible reads at a 
locus. For non-locus-based traversals (eg., ReadWalkers), this sets the maximum number of reads at each alignment start position.", required = false) + @Argument(fullName = "downsample_to_coverage", shortName = "dcov", + doc = "Coverage [integer] to downsample to. For locus-based traversals (eg., LocusWalkers and ActiveRegionWalkers)," + + "this controls the maximum depth of coverage at each locus. For non-locus-based traversals (eg., ReadWalkers), " + + "this controls the maximum number of reads sharing the same alignment start position. Note that the " + + "coverage target is an approximate goal that is not guaranteed to be met exactly: the GATK's approach " + + "to downsampling is based on even representation of reads from all alignment start positions, and the " + + "downsampling algorithm will under some circumstances retain slightly more coverage than requested.", + required = false) public Integer downsampleCoverage = null; /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java index 23b16cff2..466ade1ed 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java @@ -25,19 +25,27 @@ package org.broadinstitute.sting.gatk.downsampling; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + import java.util.Collection; import java.util.List; /** * The basic downsampler API, with no reads-specific operations. * - * Downsamplers that extend this interface rather than the ReadsDownsampler interface can handle + * Downsamplers that extend this class rather than the ReadsDownsampler class can handle * any kind of item, however they cannot be wrapped within a DownsamplingReadsIterator or a * PerSampleDownsamplingReadsIterator. 
* * @author David Roazen */ -public interface Downsampler { +public abstract class Downsampler { + + /** + * Number of items discarded by this downsampler since the last call to resetStats() + */ + protected int numDiscardedItems = 0; /** * Submit one item to the downsampler for consideration. Some downsamplers will be able to determine @@ -46,7 +54,7 @@ public interface Downsampler { * * @param item the individual item to submit to the downsampler for consideration */ - public void submit( T item ); + public abstract void submit( final T item ); /** * Submit a collection of items to the downsampler for consideration. Should be equivalent to calling @@ -54,21 +62,29 @@ public interface Downsampler { * * @param items the collection of items to submit to the downsampler for consideration */ - public void submit( Collection items ); + public void submit( final Collection items ) { + if ( items == null ) { + throw new IllegalArgumentException("submitted items must not be null"); + } + + for ( final T item : items ) { + submit(item); + } + } /** * Are there items that have survived the downsampling process waiting to be retrieved? * * @return true if this downsampler has > 0 finalized items, otherwise false */ - public boolean hasFinalizedItems(); + public abstract boolean hasFinalizedItems(); /** * Return (and *remove*) all items that have survived downsampling and are waiting to be retrieved. 
* * @return a list of all finalized items this downsampler contains, or an empty list if there are none */ - public List consumeFinalizedItems(); + public abstract List consumeFinalizedItems(); /** * Are there items stored in this downsampler that it doesn't yet know whether they will @@ -76,7 +92,7 @@ public interface Downsampler { * * @return true if this downsampler has > 0 pending items, otherwise false */ - public boolean hasPendingItems(); + public abstract boolean hasPendingItems(); /** * Peek at the first finalized item stored in this downsampler (or null if there are no finalized items) @@ -84,7 +100,7 @@ public interface Downsampler { * @return the first finalized item in this downsampler (the item is not removed from the downsampler by this call), * or null if there are none */ - public T peekFinalized(); + public abstract T peekFinalized(); /** * Peek at the first pending item stored in this downsampler (or null if there are no pending items) @@ -92,7 +108,7 @@ public interface Downsampler { * @return the first pending item stored in this downsampler (the item is not removed from the downsampler by this call), * or null if there are none */ - public T peekPending(); + public abstract T peekPending(); /** * Get the current number of items in this downsampler @@ -103,7 +119,7 @@ public interface Downsampler { * * @return a positive integer */ - public int size(); + public abstract int size(); /** * Returns the number of items discarded (so far) during the downsampling process @@ -111,21 +127,46 @@ public interface Downsampler { * @return the number of items that have been submitted to this downsampler and discarded in the process of * downsampling */ - public int getNumberOfDiscardedItems(); + public int getNumberOfDiscardedItems() { + return numDiscardedItems; + } /** * Used to tell the downsampler that no more items will be submitted to it, and that it should * finalize any pending items. 
*/ - public void signalEndOfInput(); + public abstract void signalEndOfInput(); /** * Empty the downsampler of all finalized/pending items */ - public void clear(); + public abstract void clearItems(); /** * Reset stats in the downsampler such as the number of discarded items *without* clearing the downsampler of items */ - public void reset(); + public void resetStats() { + numDiscardedItems = 0; + } + + /** + * Indicates whether an item should be excluded from elimination during downsampling. By default, + * all items representing reduced reads are excluded from downsampling, but individual downsamplers + * may override if they are able to handle reduced reads correctly. Downsamplers should check + * the return value of this method before discarding an item. + * + * @param item The item to test + * @return true if the item should not be subject to elimination during downsampling, otherwise false + */ + protected boolean doNotDiscardItem( final Object item ) { + // Use getClass() rather than instanceof for performance reasons. Ugly but fast. 
+ if ( item.getClass() == GATKSAMRecord.class ) { + return ((GATKSAMRecord)item).isReducedRead(); + } + else if ( item.getClass() == AlignmentStateMachine.class ) { + return ((AlignmentStateMachine)item).isReducedRead(); + } + + return false; + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java index 1cede9c33..c40f8019e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java @@ -30,7 +30,6 @@ import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; /** @@ -41,13 +40,11 @@ import java.util.List; * * @author David Roazen */ -public class FractionalDownsampler implements ReadsDownsampler { +public class FractionalDownsampler extends ReadsDownsampler { private ArrayList selectedReads; - private int cutoffForInclusion; - - private int numDiscardedItems; + private final int cutoffForInclusion; private static final int RANDOM_POOL_SIZE = 10000; @@ -57,18 +54,19 @@ public class FractionalDownsampler implements ReadsDownsamp * @param fraction Fraction of reads to preserve, between 0.0 (inclusive) and 1.0 (inclusive). * Actual number of reads preserved may differ randomly. 
*/ - public FractionalDownsampler( double fraction ) { + public FractionalDownsampler( final double fraction ) { if ( fraction < 0.0 || fraction > 1.0 ) { throw new ReviewedStingException("Fraction of reads to include must be between 0.0 and 1.0, inclusive"); } cutoffForInclusion = (int)(fraction * RANDOM_POOL_SIZE); - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T newRead ) { - if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion ) { + @Override + public void submit( final T newRead ) { + if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion || doNotDiscardItem(newRead) ) { selectedReads.add(newRead); } else { @@ -76,61 +74,56 @@ public class FractionalDownsampler implements ReadsDownsamp } } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { return selectedReads.size() > 0; } + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed List downsampledItems = selectedReads; - clear(); + clearItems(); return downsampledItems; } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { return selectedReads.isEmpty() ? 
null : selectedReads.get(0); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { return selectedReads.size(); } + @Override public void signalEndOfInput() { // NO-OP } - public void clear() { + @Override + public void clearItems() { selectedReads = new ArrayList(); } - public void reset() { - numDiscardedItems = 0; - } - + @Override public boolean requiresCoordinateSortOrder() { return false; } - public void signalNoMoreReadsBefore( T read ) { + @Override + public void signalNoMoreReadsBefore( final T read ) { // NO-OP } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java index 4ff729537..3ce4d09d6 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java @@ -46,16 +46,15 @@ import java.util.*; * * @author David Roazen */ -public class LevelingDownsampler, E> implements Downsampler { +public class LevelingDownsampler, E> extends Downsampler { private final int minElementsPerStack; + private final int targetSize; private List groups; private boolean groupsAreFinalized; - private int numDiscardedItems; - /** * Construct a LevelingDownsampler * @@ -65,7 +64,7 @@ public class LevelingDownsampler, E> implements Downsampler * this value -- if it does, items are removed from Lists evenly until the total size * is <= this value */ - public LevelingDownsampler( int targetSize ) { + public LevelingDownsampler( final int targetSize ) { this(targetSize, 1); } @@ -79,55 +78,58 @@ public class LevelingDownsampler, E> implements Downsampler * if a stack has only 3 elements and minElementsPerStack is 3, no matter what * we'll not reduce this stack below 3. 
*/ - public LevelingDownsampler(final int targetSize, final int minElementsPerStack) { + public LevelingDownsampler( final int targetSize, final int minElementsPerStack ) { if ( targetSize < 0 ) throw new IllegalArgumentException("targetSize must be >= 0 but got " + targetSize); if ( minElementsPerStack < 0 ) throw new IllegalArgumentException("minElementsPerStack must be >= 0 but got " + minElementsPerStack); this.targetSize = targetSize; this.minElementsPerStack = minElementsPerStack; - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T item ) { + @Override + public void submit( final T item ) { groups.add(item); } - public void submit( Collection items ){ + @Override + public void submit( final Collection items ){ groups.addAll(items); } + @Override public boolean hasFinalizedItems() { return groupsAreFinalized && groups.size() > 0; } + @Override public List consumeFinalizedItems() { if ( ! hasFinalizedItems() ) { return new ArrayList(); } // pass by reference rather than make a copy, for speed - List toReturn = groups; - clear(); + final List toReturn = groups; + clearItems(); return toReturn; } + @Override public boolean hasPendingItems() { return ! groupsAreFinalized && groups.size() > 0; } + @Override public T peekFinalized() { return hasFinalizedItems() ? groups.get(0) : null; } + @Override public T peekPending() { return hasPendingItems() ? 
groups.get(0) : null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { int s = 0; @@ -137,26 +139,24 @@ public class LevelingDownsampler, E> implements Downsampler return s; } + @Override public void signalEndOfInput() { levelGroups(); groupsAreFinalized = true; } - public void clear() { + @Override + public void clearItems() { groups = new ArrayList(); groupsAreFinalized = false; } - public void reset() { - numDiscardedItems = 0; - } - private void levelGroups() { + final int[] groupSizes = new int[groups.size()]; int totalSize = 0; - int[] groupSizes = new int[groups.size()]; int currentGroupIndex = 0; - for ( T group : groups ) { + for ( final T group : groups ) { groupSizes[currentGroupIndex] = group.size(); totalSize += groupSizes[currentGroupIndex]; currentGroupIndex++; @@ -191,20 +191,18 @@ public class LevelingDownsampler, E> implements Downsampler // Now we actually go through and reduce each group to its new count as specified in groupSizes currentGroupIndex = 0; - for ( T group : groups ) { + for ( final T group : groups ) { downsampleOneGroup(group, groupSizes[currentGroupIndex]); currentGroupIndex++; } } - private void downsampleOneGroup( T group, int numItemsToKeep ) { + private void downsampleOneGroup( final T group, final int numItemsToKeep ) { if ( numItemsToKeep >= group.size() ) { return; } - numDiscardedItems += group.size() - numItemsToKeep; - - BitSet itemsToKeep = new BitSet(group.size()); + final BitSet itemsToKeep = new BitSet(group.size()); for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(group.size(), numItemsToKeep) ) { itemsToKeep.set(selectedIndex); } @@ -213,12 +211,13 @@ public class LevelingDownsampler, E> implements Downsampler // If our group is a linked list, we can remove the desired items in a single O(n) pass with an iterator if ( group instanceof LinkedList ) { - Iterator iter = group.iterator(); + final Iterator iter = group.iterator(); while 
( iter.hasNext() ) { - iter.next(); + final E item = iter.next(); - if ( ! itemsToKeep.get(currentIndex) ) { + if ( ! itemsToKeep.get(currentIndex) && ! doNotDiscardItem(item) ) { iter.remove(); + numDiscardedItems++; } currentIndex++; @@ -227,14 +226,15 @@ public class LevelingDownsampler, E> implements Downsampler // If it's not a linked list, it's more efficient to copy the desired items into a new list and back rather // than suffer O(n^2) of item shifting else { - List keptItems = new ArrayList(numItemsToKeep); + final List keptItems = new ArrayList(group.size()); - for ( E item : group ) { - if ( itemsToKeep.get(currentIndex) ) { + for ( final E item : group ) { + if ( itemsToKeep.get(currentIndex) || doNotDiscardItem(item) ) { keptItems.add(item); } currentIndex++; } + numDiscardedItems += group.size() - keptItems.size(); group.clear(); group.addAll(keptItems); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java index 3aaed6c73..1eabf5038 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java @@ -27,7 +27,6 @@ package org.broadinstitute.sting.gatk.downsampling; import net.sf.samtools.SAMRecord; -import java.util.Collection; import java.util.LinkedList; import java.util.List; @@ -39,25 +38,21 @@ import java.util.List; * * @author David Roazen */ -public class PassThroughDownsampler implements ReadsDownsampler { +public class PassThroughDownsampler extends ReadsDownsampler { private LinkedList selectedReads; public PassThroughDownsampler() { - clear(); + clearItems(); } + @Override public void submit( T newRead ) { // All reads pass-through, no reads get downsampled selectedReads.add(newRead); } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - 
} - } - + @Override public boolean hasFinalizedItems() { return ! selectedReads.isEmpty(); } @@ -66,50 +61,50 @@ public class PassThroughDownsampler implements ReadsDownsam * Note that this list is a linked list and so doesn't support fast random access * @return */ + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed - List downsampledItems = selectedReads; - clear(); + final List downsampledItems = selectedReads; + clearItems(); return downsampledItems; } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { return selectedReads.isEmpty() ? null : selectedReads.getFirst(); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return 0; - } - @Override public int size() { return selectedReads.size(); } + @Override public void signalEndOfInput() { // NO-OP } - public void clear() { + @Override + public void clearItems() { selectedReads = new LinkedList(); } - public void reset() { - // NO-OP - } - + @Override public boolean requiresCoordinateSortOrder() { return false; } + @Override public void signalNoMoreReadsBefore( T read ) { // NO-OP } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java index a878d7553..a8df014e5 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java @@ -32,14 +32,14 @@ import net.sf.samtools.SAMRecord; * * @author David Roazen */ -public interface ReadsDownsampler extends Downsampler { +public abstract class ReadsDownsampler extends Downsampler { /** * Does this downsampler require that reads be fed to it in coordinate order? 
* * @return true if reads must be submitted to this downsampler in coordinate order, otherwise false */ - public boolean requiresCoordinateSortOrder(); + public abstract boolean requiresCoordinateSortOrder(); /** * Tell this downsampler that no more reads located before the provided read (according to @@ -52,5 +52,5 @@ public interface ReadsDownsampler extends Downsampler { * @param read the downsampler will assume that no reads located before this read will ever * be submitted to it in the future */ - public void signalNoMoreReadsBefore( T read ); + public abstract void signalNoMoreReadsBefore( final T read ); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java index 0e6bbfcb6..ff085d17b 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java @@ -39,7 +39,12 @@ import java.util.*; * * @author David Roazen */ -public class ReservoirDownsampler implements ReadsDownsampler { +public class ReservoirDownsampler extends ReadsDownsampler { + + /** + * size of our reservoir -- ie., the maximum number of reads from the stream that will be retained + * (not including any undiscardable items) + */ private final int targetSampleSize; /** @@ -58,17 +63,33 @@ public class ReservoirDownsampler implements ReadsDownsampl */ private List reservoir; + /** + * Certain items (eg., reduced reads) cannot be discarded at all during downsampling. We store + * these items separately so as not to impact the fair selection of items for inclusion in the + * reservoir. These items are returned (and cleared) along with any items in the reservoir in + * calls to consumeFinalizedItems(). + */ + private List undiscardableItems; + + /** + * Are we currently using a linked list for the reservoir? 
+ */ private boolean isLinkedList; - private int totalReadsSeen; + /** + * Count of the number of reads seen that were actually eligible for discarding. Used by the reservoir downsampling + * algorithm to ensure that all discardable reads have an equal chance of making it into the reservoir. + */ + private int totalDiscardableReadsSeen; - private int numDiscardedItems; /** * Construct a ReservoirDownsampler * * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained - * after downsampling will be min(totalReads, targetSampleSize) + * after downsampling will be min(totalDiscardableReads, targetSampleSize) + any + * undiscardable reads (eg., reduced reads). + * * @param expectFewOverflows if true, this downsampler will be optimized for the case * where most of the time we won't fill up anything like the * targetSampleSize elements. If this is false, we will allocate @@ -76,15 +97,15 @@ public class ReservoirDownsampler implements ReadsDownsampl * the cost of allocation if we often use targetSampleSize or more * elements. */ - public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows) { + public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows ) { if ( targetSampleSize <= 0 ) { throw new ReviewedStingException("Cannot do reservoir downsampling with a sample size <= 0"); } this.targetSampleSize = targetSampleSize; this.expectFewOverflows = expectFewOverflows; - clear(); - reset(); + clearItems(); + resetStats(); } /** @@ -93,15 +114,21 @@ public class ReservoirDownsampler implements ReadsDownsampl * @param targetSampleSize Size of the reservoir used by this downsampler. 
Number of items retained * after downsampling will be min(totalReads, targetSampleSize) */ - public ReservoirDownsampler ( int targetSampleSize ) { + public ReservoirDownsampler ( final int targetSampleSize ) { this(targetSampleSize, false); } + @Override + public void submit ( final T newRead ) { + if ( doNotDiscardItem(newRead) ) { + undiscardableItems.add(newRead); + return; + } - public void submit ( T newRead ) { - totalReadsSeen++; + // Only count reads that are actually eligible for discarding for the purposes of the reservoir downsampling algorithm + totalDiscardableReadsSeen++; - if ( totalReadsSeen <= targetSampleSize ) { + if ( totalDiscardableReadsSeen <= targetSampleSize ) { reservoir.add(newRead); } else { @@ -110,7 +137,7 @@ public class ReservoirDownsampler implements ReadsDownsampl isLinkedList = false; } - final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen); + final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalDiscardableReadsSeen); if ( randomSlot < targetSampleSize ) { reservoir.set(randomSlot, newRead); } @@ -118,49 +145,46 @@ public class ReservoirDownsampler implements ReadsDownsampl } } - public void submit ( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { - return reservoir.size() > 0; + return ! reservoir.isEmpty() || ! undiscardableItems.isEmpty(); } + @Override public List consumeFinalizedItems() { - if ( reservoir.isEmpty() ) { - // if there's nothing here, don't both allocating a new list completely + if ( ! 
hasFinalizedItems() ) { + // if there's nothing here, don't bother allocating a new list return Collections.emptyList(); } else { - // pass by reference rather than make a copy, for speed - List downsampledItems = reservoir; - clear(); + // pass reservoir by reference rather than make a copy, for speed + final List downsampledItems = reservoir; + downsampledItems.addAll(undiscardableItems); + clearItems(); return downsampledItems; } } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { - return reservoir.isEmpty() ? null : reservoir.get(0); + return ! reservoir.isEmpty() ? reservoir.get(0) : (! undiscardableItems.isEmpty() ? undiscardableItems.get(0) : null); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; + @Override + public int size() { + return reservoir.size() + undiscardableItems.size(); } @Override - public int size() { - return reservoir.size(); - } - public void signalEndOfInput() { // NO-OP } @@ -168,25 +192,27 @@ public class ReservoirDownsampler implements ReadsDownsampl /** * Clear the data structures used to hold information */ - public void clear() { + @Override + public void clearItems() { // if we aren't expecting many overflows, allocate a linked list not an arraylist reservoir = expectFewOverflows ? 
new LinkedList() : new ArrayList(targetSampleSize); + // there's no possibility of overflow with the undiscardable items, so we always use a linked list for them + undiscardableItems = new LinkedList<>(); + // it's a linked list if we allocate one isLinkedList = expectFewOverflows; - // an internal stat used by the downsampling process, so not cleared by reset() below - totalReadsSeen = 0; - } - - public void reset() { - numDiscardedItems = 0; + // an internal stat used by the downsampling process, so not cleared by resetStats() below + totalDiscardableReadsSeen = 0; } + @Override public boolean requiresCoordinateSortOrder() { return false; } + @Override public void signalNoMoreReadsBefore( T read ) { // NO-OP } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java index 7c6c043c2..897e2c05e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java @@ -35,11 +35,11 @@ import java.util.*; * * @author David Roazen */ -public class SimplePositionalDownsampler implements ReadsDownsampler { +public class SimplePositionalDownsampler extends ReadsDownsampler { - private int targetCoverage; + private final int targetCoverage; - private ReservoirDownsampler reservoir; + private final ReservoirDownsampler reservoir; private int currentContigIndex; @@ -51,97 +51,93 @@ public class SimplePositionalDownsampler implements ReadsDo private ArrayList finalizedReads; - private int numDiscardedItems; /** * Construct a SimplePositionalDownsampler * * @param targetCoverage Maximum number of reads that may share any given alignment start position */ - public SimplePositionalDownsampler( int targetCoverage ) { + public SimplePositionalDownsampler( final int targetCoverage ) { this.targetCoverage = 
targetCoverage; reservoir = new ReservoirDownsampler(targetCoverage); finalizedReads = new ArrayList(); - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T newRead ) { + @Override + public void submit( final T newRead ) { updatePositionalState(newRead); if ( unmappedReadsReached ) { // don't downsample the unmapped reads at the end of the stream finalizedReads.add(newRead); } else { - int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems(); + final int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems(); + // our reservoir downsampler will call doNotDiscardItem() for us to exclude items from elimination as appropriate reservoir.submit(newRead); numDiscardedItems += reservoir.getNumberOfDiscardedItems() - reservoirPreviouslyDiscardedItems; } } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { return finalizedReads.size() > 0; } + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed - List toReturn = finalizedReads; + final List toReturn = finalizedReads; finalizedReads = new ArrayList(); return toReturn; } + @Override public boolean hasPendingItems() { return reservoir.hasFinalizedItems(); } + @Override public T peekFinalized() { return finalizedReads.isEmpty() ? 
null : finalizedReads.get(0); } + @Override public T peekPending() { return reservoir.peekFinalized(); } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { return finalizedReads.size() + reservoir.size(); } + @Override public void signalEndOfInput() { finalizeReservoir(); } - public void clear() { - reservoir.clear(); - reservoir.reset(); + @Override + public void clearItems() { + reservoir.clearItems(); + reservoir.resetStats(); finalizedReads.clear(); positionEstablished = false; unmappedReadsReached = false; } - public void reset() { - numDiscardedItems = 0; - } - + @Override public boolean requiresCoordinateSortOrder() { return true; } - public void signalNoMoreReadsBefore( T read ) { + @Override + public void signalNoMoreReadsBefore( final T read ) { updatePositionalState(read); } - private void updatePositionalState( T newRead ) { + private void updatePositionalState( final T newRead ) { if ( readIsPastCurrentPosition(newRead) ) { if ( reservoir.hasFinalizedItems() ) { finalizeReservoir(); @@ -155,13 +151,13 @@ public class SimplePositionalDownsampler implements ReadsDo } } - private void setCurrentPosition( T read ) { + private void setCurrentPosition( final T read ) { currentContigIndex = read.getReferenceIndex(); currentAlignmentStart = read.getAlignmentStart(); positionEstablished = true; } - private boolean readIsPastCurrentPosition( T read ) { + private boolean readIsPastCurrentPosition( final T read ) { return ! 
positionEstablished || read.getReferenceIndex() > currentContigIndex || read.getAlignmentStart() > currentAlignmentStart || @@ -170,6 +166,6 @@ public class SimplePositionalDownsampler implements ReadsDo private void finalizeReservoir() { finalizedReads.addAll(reservoir.consumeFinalizedItems()); - reservoir.reset(); + reservoir.resetStats(); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java index 80da8f8eb..424bd489e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java @@ -43,17 +43,42 @@ import java.util.List; * Time: 11:23 AM */ public class TAROrderedReadCache { - final int maxCapacity; - final Downsampler downsampler; + private final int maxCapacity; + private ArrayList undownsampledCache; + private Downsampler downsampler; + + private static final int UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE = 10000; /** * Create a new empty ReadCache * @param maxCapacity the max capacity of the read cache. */ - public TAROrderedReadCache(int maxCapacity) { + public TAROrderedReadCache( final int maxCapacity ) { if ( maxCapacity < 0 ) throw new IllegalArgumentException("maxCapacity must be >= 0 but got " + maxCapacity); this.maxCapacity = maxCapacity; - this.downsampler = new ReservoirDownsampler(maxCapacity); + + // The one we're not currently using will always be null: + initializeUndownsampledCache(); + this.downsampler = null; + } + + /** + * Moves all reads over to the downsampler, causing it to be used from this point on. Should be called + * when the undownsampledCache fills up and we need to start discarding reads. Since the + * ReservoirDownsampler doesn't preserve relative ordering, pop operations become expensive + * after this point, as they require a O(n log n) sort. 
+ */ + private void activateDownsampler() { + downsampler = new ReservoirDownsampler<>(maxCapacity, false); + downsampler.submit(undownsampledCache); + undownsampledCache = null; // preferable to the O(n) clear() method + } + + /** + * Allocate the undownsampled cache used when we have fewer than maxCapacity items + */ + private void initializeUndownsampledCache() { + undownsampledCache = new ArrayList<>(Math.min(maxCapacity + 1, UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE)); } /** @@ -68,18 +93,31 @@ public class TAROrderedReadCache { * Add a single read to this cache. Assumed to be in sorted order w.r.t. the previously added reads * @param read a read to add */ - public void add(final GATKSAMRecord read) { + public void add( final GATKSAMRecord read ) { if ( read == null ) throw new IllegalArgumentException("Read cannot be null"); - downsampler.submit(read); + + if ( downsampler != null ) { + downsampler.submit(read); + } + else { + undownsampledCache.add(read); + + // No more room in the undownsampledCache? Time to start downsampling + if ( undownsampledCache.size() > maxCapacity ) { + activateDownsampler(); + } + } } /** * Add a collection of reads to this cache. Assumed to be in sorted order w.r.t. the previously added reads and each other * @param reads a collection of reads to add */ - public void addAll(final List reads) { + public void addAll( final List reads ) { if ( reads == null ) throw new IllegalArgumentException("Reads cannot be null"); - downsampler.submit(reads); + for ( final GATKSAMRecord read : reads ) { + add(read); + } } /** @@ -87,40 +125,44 @@ public class TAROrderedReadCache { * @return a positive integer */ public int size() { - return downsampler.size(); + return downsampler != null ? 
downsampler.size() : undownsampledCache.size(); } /** * How many reads were discarded since the last call to popCurrentReads - * @return + * + * @return number of items discarded during downsampling since last pop operation */ public int getNumDiscarded() { - return downsampler.getNumberOfDiscardedItems(); + return downsampler != null ? downsampler.getNumberOfDiscardedItems() : 0; } /** * Removes all reads currently in the cache, and returns them in sorted order (w.r.t. alignmentStart) * - * Flushes this cache, so after this call the cache will contain no reads and all downsampling stats will - * be reset. + * Flushes this cache, so after this call the cache will contain no reads, and we'll be in the same + * initial state as the constructor would put us in, with a non-null undownsampledCache and a null + * downsampler. * * @return a list of GATKSAMRecords in this cache */ public List popCurrentReads() { - final List maybeUnordered = downsampler.consumeFinalizedItems(); + final List poppedReads; - final List ordered; - if ( downsampler.getNumberOfDiscardedItems() == 0 ) { - // haven't discarded anything, so the reads are ordered properly - ordered = maybeUnordered; - } else { - // we need to sort these damn things: O(n log n) - ordered = new ArrayList(maybeUnordered); - Collections.sort(ordered, new AlignmentStartComparator()); + if ( downsampler == null ) { + poppedReads = undownsampledCache; // avoid making a copy here, since we're going to allocate a new cache + } + else { + // If we triggered the downsampler, we need to sort the reads before returning them, + // since the ReservoirDownsampler is not guaranteed to preserve relative ordering of items. + // After consuming the downsampled items in this call to popCurrentReads(), we switch back + // to using the undownsampledCache until we fill up again. 
+ poppedReads = downsampler.consumeFinalizedItems(); // avoid making a copy here + Collections.sort(poppedReads, new AlignmentStartComparator()); + downsampler = null; } - // reset the downsampler stats so getNumberOfDiscardedItems is 0 - downsampler.reset(); - return ordered; + initializeUndownsampledCache(); + return poppedReads; } } diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java index c4b566582..86f3500be 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java @@ -123,6 +123,15 @@ public class AlignmentStateMachine { return getRead().getReferenceIndex(); } + /** + * Is our read a reduced read? + * + * @return true if the read we encapsulate is a reduced read, otherwise false + */ + public boolean isReducedRead() { + return read.isReducedRead(); + } + /** * Is this the left edge state? I.e., one that is before or after the current read? 
* @return true if this state is an edge state, false otherwise diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java index 2caaf9d27..669e76adc 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java @@ -167,7 +167,7 @@ final class PerSampleReadStateManager implements Iterable // use returned List directly rather than make a copy, for efficiency's sake readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems()); - levelingDownsampler.reset(); + levelingDownsampler.resetStats(); } return nStatesAdded; diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java index 49a8d10aa..9122beebb 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java @@ -164,8 +164,8 @@ class SamplePartitioner { @Ensures("doneSubmittingReads == false") public void reset() { for ( final Downsampler downsampler : readsBySample.values() ) { - downsampler.clear(); - downsampler.reset(); + downsampler.clearItems(); + downsampler.resetStats(); } doneSubmittingReads = false; } diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java index b8367a7df..055f8630b 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java @@ -324,6 +324,31 @@ public class ArtificialSAMUtils { return Arrays.asList(left, right); } + 
/** + * Create an artificial reduced read based on the parameters. The cigar string will be *M, where * is the + * length of the read. The base counts specified in the baseCounts array will be stored fully encoded in + * the RR attribute. + * + * @param header the SAM header to associate the read with + * @param name the name of the read + * @param refIndex the reference index, i.e. what chromosome to associate it with + * @param alignmentStart where to start the alignment + * @param length the length of the read + * @param baseCounts reduced base counts to encode in the RR attribute; length must match the read length + * @return the artificial reduced read + */ + public static GATKSAMRecord createArtificialReducedRead( final SAMFileHeader header, + final String name, + final int refIndex, + final int alignmentStart, + final int length, + final int[] baseCounts ) { + final GATKSAMRecord read = createArtificialRead(header, name, refIndex, alignmentStart, length); + read.setReducedReadCounts(baseCounts); + read.setReducedReadCountsTag(); + return read; + } + /** * Create a collection of identical artificial reads based on the parameters. The cigar string for each * read will be *M, where * is the length of the read. 
diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java index 6f18d794f..8f0eee069 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java @@ -30,6 +30,7 @@ import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.testng.Assert; @@ -152,7 +153,39 @@ public class FractionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.totalReads - downsampledReads.size()); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new FractionalDownsampler(0.0); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, 1, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5)); + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 5, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler 
should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 10, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 10, "wrong number of items returned by the downsampler"); + + for ( GATKSAMRecord readReturned : readsReturned ) { + Assert.assertTrue(readReturned.isReducedRead(), "non-reduced read survived the downsampling process, but shouldn't have"); + } + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java index 972e51dcd..8cf0fd2a1 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java @@ -25,16 +25,17 @@ package org.broadinstitute.sting.gatk.downsampling; +import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.Test; import org.testng.annotations.DataProvider; import org.testng.Assert; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; +import java.util.*; public class LevelingDownsamplerUnitTest extends BaseTest { @@ -158,9 +159,46 @@ public class LevelingDownsamplerUnitTest extends BaseTest { Assert.assertEquals(numItemsReportedDiscarded, numItemsActuallyDiscarded); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); Assert.assertTrue(totalRemainingItems <= Math.max(test.targetSize, 
test.numStacks)); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final Downsampler> downsampler = new LevelingDownsampler, AlignmentStateMachine>(1); + + final Collection> groups = new LinkedList>(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int alignmentStart : Arrays.asList(1, 2, 3) ) { + final LinkedList group = new LinkedList(); + for ( int i = 1; i <= 10; i++ ) { + group.add(new AlignmentStateMachine(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, alignmentStart, 5, baseCounts))); + } + groups.add(group); + } + + downsampler.submit(groups); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 30, "downsampler size() reports wrong number of items"); + + final Collection> groupsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(groupsReturned.size(), 3, "wrong number of groups returned by the downsampler"); + + for ( LinkedList group : groupsReturned ) { + Assert.assertEquals(group.size(), 10, "group has wrong size after downsampling"); + + for ( AlignmentStateMachine state : group ) { + Assert.assertTrue(state.isReducedRead()); + } + } + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java index 022eb02d2..a50201efd 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java @@ -30,6 +30,7 @@ import 
net.sf.samtools.SAMRecord; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.testng.Assert; @@ -125,7 +126,49 @@ public class ReservoirDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.expectedNumDiscardedItems); Assert.assertEquals(test.totalReads - downsampledReads.size(), test.expectedNumDiscardedItems); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new ReservoirDownsampler(1); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, 1, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5)); + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 4, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 11, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 11, "wrong number of items returned by the downsampler"); + + int numReducedReadsReturned = 0; + int 
numNormalReadsReturned = 0; + for ( GATKSAMRecord readReturned : readsReturned ) { + if ( readReturned.isReducedRead() ) { + numReducedReadsReturned++; + } + else { + numNormalReadsReturned++; + } + } + + Assert.assertEquals(numReducedReadsReturned, 10, "wrong number of reduced reads returned by the downsampler"); + Assert.assertEquals(numNormalReadsReturned, 1, "wrong number of non-reduced reads returned by the downsampler"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java index c6b0dea29..bec0030d0 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java @@ -177,7 +177,7 @@ public class SimplePositionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(numReadsActuallyEliminated, numReadsReportedEliminated); } - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } @@ -328,4 +328,48 @@ public class SimplePositionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampledReads.size(), 10); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new SimplePositionalDownsampler(1); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int alignmentStart : Arrays.asList(1, 2, 3) ) { + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, alignmentStart, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, 
"foo", 0, alignmentStart, 5)); + } + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 12, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 33, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 33, "wrong number of items returned by the downsampler"); + + int numReducedReadsReturned = 0; + int numNormalReadsReturned = 0; + for ( GATKSAMRecord readReturned : readsReturned ) { + if ( readReturned.isReducedRead() ) { + numReducedReadsReturned++; + } + else { + numNormalReadsReturned++; + } + } + + Assert.assertEquals(numReducedReadsReturned, 30, "wrong number of reduced reads returned by the downsampler"); + Assert.assertEquals(numNormalReadsReturned, 3, "wrong number of non-reduced reads returned by the downsampler"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java index f3e1ce44b..4d85997b3 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java @@ -26,9 +26,11 @@ package org.broadinstitute.sting.gatk.traversals; import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.sam.ArtificialBAMBuilder; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import 
org.testng.Assert; import org.testng.annotations.BeforeClass; @@ -39,6 +41,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; public class TAROrderedReadCacheUnitTest extends BaseTest { @@ -98,8 +101,53 @@ public class TAROrderedReadCacheUnitTest extends BaseTest { Assert.assertEquals(cache.getNumDiscarded(), 0, "should have reset stats"); Assert.assertEquals(cacheReads.size(), nExpectedToKeep, "should have 1 read for every read we expected to keep"); + verifySortednessOfReads(cacheReads); + } + + @Test + public void testReadCacheWithReducedReads() { + final List reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 100; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, i, 5, baseCounts)); + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, i, 5)); + } + + final TAROrderedReadCache cache = new TAROrderedReadCache(50); + + cache.addAll(reads); + + // Our cache should have kept all of the reduced reads (which are retained unconditionally and do not count + // towards the capacity limit), and discarded half of the 100 non-reduced reads due to the cache capacity + // limit of 50. 
+ Assert.assertEquals(cache.size(), 150, "wrong number of reads in the cache at the end"); + Assert.assertEquals(cache.getNumDiscarded(), 50, "wrong number of reads discarded from the cache"); + + final List cacheReads = cache.popCurrentReads(); + + int numReducedReadsRetained = 0; + int numNormalReadsRetained = 0; + + for ( GATKSAMRecord read : cacheReads ) { + if ( read.isReducedRead() ) { + numReducedReadsRetained++; + } + else { + numNormalReadsRetained++; + } + } + + Assert.assertEquals(numReducedReadsRetained, 100, "wrong number of reduced reads retained in the cache"); + Assert.assertEquals(numNormalReadsRetained, 50, "wrong number of non-reduced reads retained in the cache"); + + verifySortednessOfReads(cacheReads); + } + + private void verifySortednessOfReads( final List reads) { int lastStart = -1; - for ( final GATKSAMRecord read : cacheReads ) { + for ( GATKSAMRecord read : reads ) { Assert.assertTrue(lastStart <= read.getAlignmentStart(), "Reads should be sorted but weren't. Found read with start " + read.getAlignmentStart() + " while last was " + lastStart); lastStart = read.getAlignmentStart(); } From d1f397c7115e37dd4b54091a60b71fdb57260d89 Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Wed, 12 Jun 2013 12:22:36 -0400 Subject: [PATCH 52/99] Fixing bug with dangling tails in which the tail connects all the way back to the reference source node. -- List of vertices can't contain a source node. 
--- .../sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java | 4 ++-- .../haplotypecaller/readthreading/ReadThreadingAssembler.java | 2 +- .../haplotypecaller/readthreading/ReadThreadingGraph.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java index 70ef539f3..2b37d90c2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java @@ -309,7 +309,7 @@ public class BaseGraph extends Default } v = getNextReferenceVertex(v); // advance along the reference path while( v != null && !v.equals(toVertex) ) { - bytes = ArrayUtils.addAll( bytes, getAdditionalSequence(v) ); + bytes = ArrayUtils.addAll(bytes, getAdditionalSequence(v)); v = getNextReferenceVertex(v); // advance along the reference path } if( includeStop && v != null && v.equals(toVertex)) { @@ -561,7 +561,7 @@ public class BaseGraph extends Default verticesToRemove.removeAll(onPathFromRefSource); removeAllVertices(verticesToRemove); - // simple santity checks that this algorithm is working. + // simple sanity checks that this algorithm is working. 
if ( getSinks().size() > 1 ) { throw new IllegalStateException("Should have eliminated all but the reference sink, but found " + getSinks()); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index f4290f2bb..fc0f781c5 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -207,7 +207,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { * @return */ private boolean reasonableNumberOfPaths(final SeqGraph graph) { - final KBestPaths pathFinder = new KBestPaths(false); + final KBestPaths pathFinder = new KBestPaths<>(false); final List> allPaths = pathFinder.getKBestPaths(graph, 100000); logger.info("Found " + allPaths.size() + " paths through " + graph + " with maximum " + maxAllowedPathsForReadThreadingAssembler); return allPaths.size() <= maxAllowedPathsForReadThreadingAssembler; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index 8d8cb83f6..0844f979b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -339,7 +339,7 @@ public class ReadThreadingGraph extends BaseGraph altPath = findPathToLowestCommonAncestorOfReference(vertex); - if ( altPath == null ) + if ( altPath == null || isRefSource(altPath.get(0)) ) return null; // now get the reference path from the LCA From 
d5f0848bd50d698e5509ffcdd20b4de6e39c684c Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Thu, 13 Jun 2013 09:59:16 -0400 Subject: [PATCH 53/99] HC bam writer now sets the read to MQ0 if it isn't informative -- Makes visualization of read evidence easier in IGV. --- .../haplotypeBAMWriter/AllHaplotypeBAMWriter.java | 8 ++++---- .../CalledHaplotypeBAMWriter.java | 6 +++--- .../haplotypeBAMWriter/HaplotypeBAMWriter.java | 14 +++++++++++--- .../HaplotypeBAMWriterUnitTest.java | 6 +++--- 4 files changed, 21 insertions(+), 13 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java index 54061c781..e7e5cf0e1 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java @@ -80,18 +80,18 @@ class AllHaplotypeBAMWriter extends HaplotypeBAMWriter { final List bestHaplotypes, final Set calledHaplotypes, final Map stratifiedReadMap) { - writeHaplotypesAsReads(haplotypes, new HashSet(bestHaplotypes), paddedReferenceLoc); + writeHaplotypesAsReads(haplotypes, new HashSet<>(bestHaplotypes), paddedReferenceLoc); // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently - final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + final Map alleleToHaplotypeMap = new HashMap<>(haplotypes.size()); for ( final Haplotype haplotype : haplotypes ) alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); // next, output the interesting reads for each sample aligned against the appropriate haplotype for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + for ( final 
Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue()); - writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart()); + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative()); } } } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java index d63cf65fc..7206dd674 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java @@ -87,7 +87,7 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { writeHaplotypesAsReads(calledHaplotypes, calledHaplotypes, paddedReferenceLoc); // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently - final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + final Map alleleToHaplotypeMap = new HashMap<>(haplotypes.size()); for ( final Haplotype haplotype : calledHaplotypes ) { alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); } @@ -97,10 +97,10 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { // next, output the interesting reads for each sample aligned against one of the called haplotypes for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + for ( final Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { if ( entry.getKey().getMappingQuality() > 0 ) { 
final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue(), allelesOfCalledHaplotypes); - writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart()); + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative()); } } } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java index 2eea664d9..1afbeed63 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java @@ -185,11 +185,13 @@ public abstract class HaplotypeBAMWriter { * @param originalRead the read we want to write aligned to the reference genome * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. 
+ * @param isInformative true if the read is differentially informative for one of the haplotypes */ protected void writeReadAgainstHaplotype(final GATKSAMRecord originalRead, final Haplotype haplotype, - final int referenceStart) { - final GATKSAMRecord alignedToRef = createReadAlignedToRef(originalRead, haplotype, referenceStart); + final int referenceStart, + final boolean isInformative) { + final GATKSAMRecord alignedToRef = createReadAlignedToRef(originalRead, haplotype, referenceStart, isInformative); if ( alignedToRef != null ) bamWriter.addAlignment(alignedToRef); } @@ -201,11 +203,13 @@ public abstract class HaplotypeBAMWriter { * @param originalRead the read we want to write aligned to the reference genome * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. + * @param isInformative true if the read is differentially informative for one of the haplotypes * @return a GATKSAMRecord aligned to reference, or null if no meaningful alignment is possible */ protected GATKSAMRecord createReadAlignedToRef(final GATKSAMRecord originalRead, final Haplotype haplotype, - final int referenceStart) { + final int referenceStart, + final boolean isInformative) { if ( originalRead == null ) throw new IllegalArgumentException("originalRead cannot be null"); if ( haplotype == null ) throw new IllegalArgumentException("haplotype cannot be null"); if ( haplotype.getCigar() == null ) throw new IllegalArgumentException("Haplotype cigar not set " + haplotype); @@ -225,6 +229,10 @@ public abstract class HaplotypeBAMWriter { addHaplotypeTag(read, haplotype); + // uninformative reads are set to zero mapping quality to enhance visualization + if ( !isInformative ) + read.setMappingQuality(0); + // compute here the read starts w.r.t. 
the reference from the SW result and the hap -> ref cigar final Cigar extendedHaplotypeCigar = haplotype.getConsolidatedPaddedCigar(1000); final int readStartOnHaplotype = AlignmentUtils.calcFirstBaseMatchingReferenceInCigar(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1()); diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java index 91a2988aa..0c76ad338 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java @@ -177,10 +177,10 @@ public class HaplotypeBAMWriterUnitTest extends BaseTest { final GATKSAMRecord originalReadCopy = (GATKSAMRecord)read.clone(); if ( expectedReadCigar == null ) { - Assert.assertNull(writer.createReadAlignedToRef(read, haplotype, refStart)); + Assert.assertNull(writer.createReadAlignedToRef(read, haplotype, refStart, true)); } else { final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedReadCigar); - final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, refStart); + final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, refStart, true); Assert.assertEquals(alignedRead.getReadName(), originalReadCopy.getReadName()); Assert.assertEquals(alignedRead.getAlignmentStart(), expectedReadStart); @@ -290,7 +290,7 @@ public class HaplotypeBAMWriterUnitTest extends BaseTest { @Test(dataProvider = "ComplexReadAlignedToRef", enabled = !DEBUG) public void testReadAlignedToRefComplexAlignment(final int testIndex, final GATKSAMRecord read, final String reference, final Haplotype haplotype, final int expectedMaxMismatches) throws Exception { final HaplotypeBAMWriter writer = new CalledHaplotypeBAMWriter(new MockBAMWriter()); - final 
GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, 1); + final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, 1, true); if ( alignedRead != null ) { final int mismatches = AlignmentUtils.getMismatchCount(alignedRead, reference.getBytes(), alignedRead.getAlignmentStart() - 1).numMismatches; Assert.assertTrue(mismatches <= expectedMaxMismatches, From f44efc27ae1c79c1d376d930014ce0d22dda6f8b Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Thu, 13 Jun 2013 10:05:53 -0400 Subject: [PATCH 54/99] Relaxing the constraints on the readIsPoorlyModelled function. -- Turns out we were aggressively throwing out borderline-good reads. --- .../haplotypecaller/HaplotypeCaller.java | 2 +- ...plexAndSymbolicVariantsIntegrationTest.java | 6 +++--- .../HaplotypeCallerIntegrationTest.java | 18 +++++++++--------- ...HaplotypeCallerParallelIntegrationTest.java | 2 +- .../PerReadAlleleLikelihoodMapUnitTest.java | 6 +++--- .../genotyper/PerReadAlleleLikelihoodMap.java | 6 +++--- 6 files changed, 20 insertions(+), 20 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index fb7fb652c..f3f54060f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -459,7 +459,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // the minimum length of a read we'd consider using for genotyping private final static int MIN_READ_LENGTH = 10; - private List samplesList = new ArrayList(); + private List samplesList = new ArrayList<>(); private final static Allele FAKE_REF_ALLELE = Allele.create("N", true); // used in isActive function to call into UG Engine. 
Should never appear anywhere in a VCF file private final static Allele FAKE_ALT_ALLELE = Allele.create("", false); // used in isActive function to call into UG Engine. Should never appear anywhere in a VCF file diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index 073d54ec5..8394baa72 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "d21f15a5809fe5259af41ae6774af6f1"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "e7b28ea087e8624f1e596c9d65381fea"); } private void HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "d4a0797c2fd4c103bf9a137633376156"); + "321dc9f3d330790bac7981ffae00cb0c"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "a9872228d0275a30f5a1f7e070a9c9f4"); + "2a72a9b5c6778b99bf155a7c5e90d11e"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index dbdd0afcd..f9bab8ea7 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -78,12 +78,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "e9167a1bfc0fc276586788d1ce1be408"); + HCTest(CEUTRIO_BAM, "", "f25b9cfc85995cbe8eb6ba5a126d713d"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "b1d46afb9659ac3b92a3d131b58924ef"); + HCTest(NA12878_BAM, "", "19d685727ec60b3568f313bc44f79b49"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -94,7 +94,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "d83856b8136776bd731a8037c16b71fa"); + "6da65f1d396b9c709eb6246cf3f615c1"); } @Test @@ -110,7 +110,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "70c4476816f5d35c9978c378dbeac09b"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "e3db7d56154e36eeb887259bea4b241d"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -147,7 +147,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "947aae309ecab7cd3f17ff9810884924"); + HCTestNearbySmallIntervals(NA12878_BAM, "", 
"6e170d03047caefc2fba3f1c1f8de132"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -186,7 +186,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("0124c4923d96ec0f8222b596dd4ef534")); + Arrays.asList("40416433baf96f4e84a058459717060b")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -194,7 +194,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("0e020dcfdf249225714f5cd86ed3869f")); + Arrays.asList("cf1461ce829023ea9920fbfeb534eb97")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } @@ -208,7 +208,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestDBSNPAnnotationWGS() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, - Arrays.asList("446a786bb539f3ec2084dd75167568aa")); + Arrays.asList("45ca324be3917655e645d6c290c9280f")); executeTest("HC calling with dbSNP ID annotation on WGS intervals", spec); } @@ -217,7 +217,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller 
--disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-11,000,000 -D " + b37dbSNP132 + " -L " + hg19Intervals + " -isr INTERSECTION", 1, - Arrays.asList("9587029b702bb59bd4dfec69eac4c210")); + Arrays.asList("b7037770b7953cdf858764b99fa243ed")); executeTest("HC calling with dbSNP ID annotation on WEx intervals", spec); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java index 62e685eab..857d0fc9e 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java @@ -61,7 +61,7 @@ public class HaplotypeCallerParallelIntegrationTest extends WalkerTest { List tests = new ArrayList(); for ( final int nct : Arrays.asList(1, 2, 4) ) { - tests.add(new Object[]{nct, "ef42a438b82681d1c0f921c57e16ff12"}); + tests.add(new Object[]{nct, "bd2a57e6b0cffb4cbdba609a6c1683dc"}); } return tests.toArray(new Object[][]{}); diff --git a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java index 9530ea41f..651beffc8 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java @@ -233,7 +233,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0)}); tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -3.0)}); tests.add(new Object[]{100, 
0.01, false, Arrays.asList(-5.0, -10.0, -2.0)}); - tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.0)}); + tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.2)}); tests.add(new Object[]{100, 0.001, true, Arrays.asList(-5.0, -10.0)}); tests.add(new Object[]{100, 0.001, false, Arrays.asList(-5.0, -10.0, 0.0)}); @@ -243,7 +243,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { @Test(dataProvider = "PoorlyModelledReadData") public void testPoorlyModelledRead(final int readLen, final double maxErrorRatePerBase, final boolean expected, final List log10likelihoods) { final byte[] bases = Utils.dupBytes((byte)'A', readLen); - final byte[] quals = Utils.dupBytes((byte) 30, readLen); + final byte[] quals = Utils.dupBytes((byte) 40, readLen); final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, readLen + "M"); @@ -279,7 +279,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { final double likelihood = bad ? 
-100.0 : 0.0; final byte[] bases = Utils.dupBytes((byte)'A', readLen); - final byte[] quals = Utils.dupBytes((byte) 30, readLen); + final byte[] quals = Utils.dupBytes((byte) 40, readLen); final Allele allele = Allele.create(Utils.dupString("A", readI+1)); diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index 8067d67bc..70be85f54 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -321,7 +321,7 @@ public class PerReadAlleleLikelihoodMap { * @return the list of reads removed from this map because they are poorly modelled */ public List filterPoorlyModelledReads(final double maxErrorRatePerBase) { - final List removedReads = new LinkedList(); + final List removedReads = new LinkedList<>(); final Iterator>> it = likelihoodReadMap.entrySet().iterator(); while ( it.hasNext() ) { final Map.Entry> record = it.next(); @@ -356,8 +356,8 @@ public class PerReadAlleleLikelihoodMap { * @return true if none of the log10 likelihoods imply that the read truly originated from one of the haplotypes */ protected boolean readIsPoorlyModelled(final GATKSAMRecord read, final Collection log10Likelihoods, final double maxErrorRatePerBase) { - final double maxErrorsForRead = Math.ceil(read.getReadLength() * maxErrorRatePerBase); - final double log10QualPerBase = -3.0; + final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase)); + final double log10QualPerBase = -4.0; final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase; for ( final double log10Likelihood : log10Likelihoods ) From dd6e2523731bd829e3df5658bbb2ba8505346a54 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 12 Jun 2013 17:47:27 -0400 Subject: [PATCH 55/99] 
GATKRunReport no longer tries to use the Broad filesystem destination, rather it goes unconditionally to S3 --- .../org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java index de84809bd..9704454c9 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java +++ b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java @@ -419,7 +419,7 @@ public class GATKRunReport { * @return true if and only if the common run report repository is available and online to receive reports */ private boolean repositoryIsOnline() { - return REPORT_SENTINEL.exists(); + return false; // REPORT_SENTINEL.exists(); } From 33720b83ebd093dea536b43428f937994cfc9bc4 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 10 Jun 2013 14:52:41 -0400 Subject: [PATCH 56/99] No longer merge overlapping fragments from HaplotypeCaller -- Merging overlapping fragments turns out to be a bad idea. In the case where you can safely merge the reads you only gain a small about of overlapping kmers, so the potential gains are relatively small. That's in contrast to the very large danger of merging reads inappropriately, such as when the reads only overlap in a repetitive region, and you artificially construct reads that look like the reference but actually may carry a larger true insertion w.r.t. the reference. Because this problem isn't limited to repetitive sequeuence, but in principle could occur in any sequence, it's just not safe to do this merging. Best to leave haplotype construction to the assembly graph. 
--- .../walkers/haplotypecaller/HaplotypeCaller.java | 14 +++----------- ...omplexAndSymbolicVariantsIntegrationTest.java | 4 ++-- .../HaplotypeCallerIntegrationTest.java | 16 ++++++++-------- .../HaplotypeCallerParallelIntegrationTest.java | 2 +- 4 files changed, 14 insertions(+), 22 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index f3f54060f..b94b74748 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -919,19 +919,10 @@ public class HaplotypeCaller extends ActiveRegionWalker, In private void finalizeActiveRegion( final ActiveRegion activeRegion ) { if( DEBUG ) { logger.info("Assembling " + activeRegion.getLocation() + " with " + activeRegion.size() + " reads: (with overlap region = " + activeRegion.getExtendedLoc() + ")"); } - final List finalizedReadList = new ArrayList<>(); - final FragmentCollection fragmentCollection = FragmentUtils.create( activeRegion.getReads() ); - activeRegion.clearReads(); - - // Join overlapping paired reads to create a single longer read - finalizedReadList.addAll( fragmentCollection.getSingletonReads() ); - for( final List overlappingPair : fragmentCollection.getOverlappingPairs() ) { - finalizedReadList.addAll( FragmentUtils.mergeOverlappingPairedFragments(overlappingPair) ); - } // Loop through the reads hard clipping the adaptor and low quality tails - final List readsToUse = new ArrayList<>(finalizedReadList.size()); - for( final GATKSAMRecord myRead : finalizedReadList ) { + final List readsToUse = new ArrayList<>(activeRegion.getReads().size()); + for( final GATKSAMRecord myRead : activeRegion.getReads() ) { final GATKSAMRecord postAdapterRead = ( myRead.getReadUnmappedFlag() ? 
myRead : ReadClipper.hardClipAdaptorSequence( myRead ) ); if( postAdapterRead != null && !postAdapterRead.isEmpty() && postAdapterRead.getCigar().getReadLength() > 0 ) { GATKSAMRecord clippedRead; @@ -962,6 +953,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } } + activeRegion.clearReads(); activeRegion.addAll(DownsamplingUtils.levelCoverageByPosition(ReadUtils.sortReadsByCoordinate(readsToUse), maxReadsInRegionPerSample, minReadsPerAlignmentStart)); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index 8394baa72..c1b8f8a70 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "e7b28ea087e8624f1e596c9d65381fea"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "03944bbedb012e2ac2026a84baa0560c"); } private void HCTestSymbolicVariants(String bam, String args, String md5) { @@ -94,6 +94,6 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "2a72a9b5c6778b99bf155a7c5e90d11e"); + "7e9f99d4cba8087dac66ea871b910d7e"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index f9bab8ea7..da92f39fc 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -78,12 +78,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "f25b9cfc85995cbe8eb6ba5a126d713d"); + HCTest(CEUTRIO_BAM, "", "09d84bc1aef2dd9c185934752172b794"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "19d685727ec60b3568f313bc44f79b49"); + HCTest(NA12878_BAM, "", "5c074930b27d1f5c942fe755c2a8be27"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -94,7 +94,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "6da65f1d396b9c709eb6246cf3f615c1"); + "005a6d1933913a5d96fc56d01303fa95"); } @Test @@ -110,7 +110,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "e3db7d56154e36eeb887259bea4b241d"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "9b6f667ad87e19c38d16fefe63c37484"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -186,7 +186,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + 
privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("40416433baf96f4e84a058459717060b")); + Arrays.asList("a47ef09a8701128cfb301a83b7bb0728")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -194,7 +194,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("cf1461ce829023ea9920fbfeb534eb97")); + Arrays.asList("0cb99f6bb3e630add4b3486c496fa508")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } @@ -208,7 +208,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestDBSNPAnnotationWGS() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, - Arrays.asList("45ca324be3917655e645d6c290c9280f")); + Arrays.asList("92f947cc89e4f50cf2ef3121d2fe308d")); executeTest("HC calling with dbSNP ID annotation on WGS intervals", spec); } @@ -217,7 +217,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-11,000,000 -D " + b37dbSNP132 + " -L " + hg19Intervals + " -isr INTERSECTION", 1, - Arrays.asList("b7037770b7953cdf858764b99fa243ed")); + Arrays.asList("91877c8ea3eb0e0316d9ad11fdcc1a87")); executeTest("HC calling with dbSNP ID annotation on WEx intervals", spec); } } diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java index 857d0fc9e..d009550f4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java @@ -61,7 +61,7 @@ public class HaplotypeCallerParallelIntegrationTest extends WalkerTest { List tests = new ArrayList(); for ( final int nct : Arrays.asList(1, 2, 4) ) { - tests.add(new Object[]{nct, "bd2a57e6b0cffb4cbdba609a6c1683dc"}); + tests.add(new Object[]{nct, "9da4cc89590c4c64a36f4a9c820f8609"}); } return tests.toArray(new Object[][]{}); From dd5674b3b8c088e7dbb0b7e1822f4e55d02f7315 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 12 Jun 2013 12:43:19 -0400 Subject: [PATCH 57/99] Add genotyping accuracy assessment to AssessNA12878 -- Now table looks like: Name VariantType AssessmentType Count variant SNPS TRUE_POSITIVE 1220 variant SNPS FALSE_POSITIVE 0 variant SNPS FALSE_NEGATIVE 1 variant SNPS TRUE_NEGATIVE 150 variant SNPS CALLED_NOT_IN_DB_AT_ALL 0 variant SNPS HET_CONCORDANCE 100.00 variant SNPS HOMVAR_CONCORDANCE 99.63 variant INDELS TRUE_POSITIVE 273 variant INDELS FALSE_POSITIVE 0 variant INDELS FALSE_NEGATIVE 15 variant INDELS TRUE_NEGATIVE 79 variant INDELS CALLED_NOT_IN_DB_AT_ALL 2 variant INDELS HET_CONCORDANCE 98.67 variant INDELS HOMVAR_CONCORDANCE 89.58 -- Rewrite / refactored parts of subsetDiploidAlleles in GATKVariantContextUtils to have a BEST_MATCH assignment method that does it's best to simply match the genotype after subsetting to a set of alleles. So if the original GT was A/B and you subset to A/B it remains A/B but if you subset to A/C you get A/A. 
This means that het-alt B/C genotypes become A/B and A/C when subsetting to bi-allelics which is the convention in the KB. Add lots of unit tests for this functions (from 0 previously) -- BadSites in Assessment now emits TP sites with discordant genotypes with the type GENOTYPE_DISCORDANCE and tags the expected genotype in the info field as ExpectedGenotype, such as this record: 20 10769255 . A ATGTG 165.73 . ExpectedGenotype=HOM_VAR;SupportingCallsets=ebanks,depristo,CEUTrio_best_practices;WHY=GENOTYPE_DISCORDANCE GT:AD:DP:GQ:PL 0/1:1,9:10:6:360,0,6 Indicating that the call was a HET but the expected result was HOM_VAR -- Forbid subsetting of diploid genotypes to just a single allele. -- Added subsetToRef as a separate specific function. Use that in the DiploidExactAFCalc in the case that you need to reduce yourself to ref only. Preserves DP in the genotype field when this is possible, so a few integration tests have changed for the UG --- .../genotyper/afcalc/DiploidExactAFCalc.java | 7 +- ...dGenotyperIndelCallingIntegrationTest.java | 2 +- ...GenotyperNormalCallingIntegrationTest.java | 4 +- .../variant/GATKVariantContextUtils.java | 206 ++++++++++++---- .../GATKVariantContextUtilsUnitTest.java | 233 ++++++++++++++++-- 5 files changed, 380 insertions(+), 72 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java index 170b6e250..2ece18002 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java @@ -106,7 +106,7 @@ public abstract class DiploidExactAFCalc extends ExactAFCalc { alleles.add(vc.getReference()); alleles.addAll(chooseMostLikelyAlternateAlleles(vc, getMaxAltAlleles())); builder.alleles(alleles); - 
builder.genotypes(GATKVariantContextUtils.subsetDiploidAlleles(vc, alleles, false)); + builder.genotypes(GATKVariantContextUtils.subsetDiploidAlleles(vc, alleles, GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL)); return builder.make(); } else { return vc; @@ -352,6 +352,9 @@ public abstract class DiploidExactAFCalc extends ExactAFCalc { final List allelesToUse, final boolean assignGenotypes, final int ploidy) { - return GATKVariantContextUtils.subsetDiploidAlleles(vc, allelesToUse, assignGenotypes); + return allelesToUse.size() == 1 + ? GATKVariantContextUtils.subsetToRefOnly(vc, ploidy) + : GATKVariantContextUtils.subsetDiploidAlleles(vc, allelesToUse, + assignGenotypes ? GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN : GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java index 98a482c6f..64a27c4c3 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java @@ -136,7 +136,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L " + result.get(0).getAbsolutePath(), 1, - Arrays.asList("294183823d678d3668f4fa98b4de6e06")); + Arrays.asList("facac578891a4f2be63ddd5ba6b9096b")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index bf4316415..f7c5e6fd5 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -64,7 +64,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultiSamplePilot1() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommand + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10,022,000-10,025,000", 1, - Arrays.asList("5e8f1fa88dc93320cc0e75e9fe6e153b")); + Arrays.asList("474dfb943a307c86cabe2043970c58f3")); executeTest("test MultiSample Pilot1", spec); } @@ -80,7 +80,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testWithAllelesPassedIn2() { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommand + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "allelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,025,000", 1, - Arrays.asList("60115af273fde49c76d4df6c9c0f6501")); + Arrays.asList("3e646003c5b93da80c7d8e5d0ff2ee4e")); executeTest("test MultiSample Pilot2 with alleles passed in and emitting all sites", spec2); } diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java index b5a6e82a0..3bc5da82f 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java +++ 
b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java @@ -45,7 +45,7 @@ public class GATKVariantContextUtils { public static final int DEFAULT_PLOIDY = 2; public static final double SUM_GL_THRESH_NOCALL = -0.1; // if sum(gl) is bigger than this threshold, we treat GL's as non-informative and will force a no-call. - private static final List NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL); + protected static final List NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL); public final static String MERGE_FILTER_PREFIX = "filterIn"; public final static String MERGE_REF_IN_ALL = "ReferenceInAll"; public final static String MERGE_FILTER_IN_ALL = "FilteredInAll"; @@ -421,6 +421,37 @@ public class GATKVariantContextUtils { return true; // we passed all tests, we matched } + public enum GenotypeAssignmentMethod { + /** + * set all of the genotype GT values to NO_CALL + */ + SET_TO_NO_CALL, + + /** + * Use the subsetted PLs to greedily assign genotypes + */ + USE_PLS_TO_ASSIGN, + + /** + * Try to match the original GT calls, if at all possible + * + * Suppose I have 3 alleles: A/B/C and the following samples: + * + * original_GT best_match to A/B best_match to A/C + * S1 => A/A A/A A/A + * S2 => A/B A/B A/A + * S3 => B/B B/B A/A + * S4 => B/C A/B A/C + * S5 => C/C A/A C/C + * + * Basically, all alleles not in the subset map to ref. It means that het-alt genotypes + * when split into 2 bi-allelic variants will be het in each, which is good in some cases, + * rather than the undetermined behavior when using the PLs to assign, which could result + * in hom-var or hom-ref for each, depending on the exact PL values.
+ */ + BEST_MATCH_TO_ORIGINAL + } + /** * subset the Variant Context to the specific set of alleles passed in (pruning the PLs appropriately) * @@ -430,22 +461,23 @@ public class GATKVariantContextUtils { * @return genotypes */ public static GenotypesContext subsetDiploidAlleles(final VariantContext vc, - final List allelesToUse, - final boolean assignGenotypes) { + final List allelesToUse, + final GenotypeAssignmentMethod assignGenotypes) { + if ( allelesToUse.get(0).isNonReference() ) throw new IllegalArgumentException("First allele must be the reference allele"); + if ( allelesToUse.size() == 1 ) throw new IllegalArgumentException("Cannot subset to only 1 alt allele"); // the genotypes with PLs final GenotypesContext oldGTs = vc.getGenotypes(); // the new genotypes to create final GenotypesContext newGTs = GenotypesContext.create(); + // optimization: if no input genotypes, just exit - if (oldGTs.isEmpty()) - return newGTs; + if (oldGTs.isEmpty()) return newGTs; // samples final List sampleIndices = oldGTs.getSampleNamesOrderedByName(); - // we need to determine which of the alternate alleles (and hence the likelihoods) to use and carry forward final int numOriginalAltAlleles = vc.getAlternateAlleles().size(); final int expectedNumLikelihoods = GenotypeLikelihoods.numLikelihoods(vc.getNAlleles(), 2); @@ -456,8 +488,8 @@ public class GATKVariantContextUtils { // an optimization: if we are supposed to use all (or none in the case of a ref call) of the alleles, // then we can keep the PLs as is; otherwise, we determine which ones to keep - if ( numNewAltAlleles != numOriginalAltAlleles && numNewAltAlleles > 0 ) { - likelihoodIndexesToUse = new ArrayList(30); + if ( numNewAltAlleles != numOriginalAltAlleles ) { + likelihoodIndexesToUse = new ArrayList<>(30); final boolean[] altAlleleIndexToUse = new boolean[numOriginalAltAlleles]; for ( int i = 0; i < numOriginalAltAlleles; i++ ) { @@ -478,55 +510,127 @@ public class GATKVariantContextUtils { // create the new 
genotypes for ( int k = 0; k < oldGTs.size(); k++ ) { final Genotype g = oldGTs.get(sampleIndices.get(k)); - if ( !g.hasLikelihoods() ) { - newGTs.add(GenotypeBuilder.create(g.getSampleName(), NO_CALL_ALLELES)); - continue; - } + final GenotypeBuilder gb = new GenotypeBuilder(g); // create the new likelihoods array from the alleles we are allowed to use - final double[] originalLikelihoods = g.getLikelihoods().getAsVector(); double[] newLikelihoods; - if ( likelihoodIndexesToUse == null ) { - newLikelihoods = originalLikelihoods; - } else if ( originalLikelihoods.length != expectedNumLikelihoods ) { - logger.warn("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods); + if ( !g.hasLikelihoods() ) { + // we don't have any likelihoods, so we null out PLs and make G ./. newLikelihoods = null; + gb.noPL(); } else { - newLikelihoods = new double[likelihoodIndexesToUse.size()]; - int newIndex = 0; - for ( int oldIndex : likelihoodIndexesToUse ) - newLikelihoods[newIndex++] = originalLikelihoods[oldIndex]; + final double[] originalLikelihoods = g.getLikelihoods().getAsVector(); + if ( likelihoodIndexesToUse == null ) { + newLikelihoods = originalLikelihoods; + } else if ( originalLikelihoods.length != expectedNumLikelihoods ) { + logger.warn("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods); + newLikelihoods = null; + } else { + newLikelihoods = new double[likelihoodIndexesToUse.size()]; + int newIndex = 0; + for ( int oldIndex : likelihoodIndexesToUse ) + newLikelihoods[newIndex++] = originalLikelihoods[oldIndex]; - // might need to re-normalize - newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true); - } + // might need to re-normalize + newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true); + } - // if there is no mass on 
the (new) likelihoods, then just no-call the sample - if ( newLikelihoods != null && MathUtils.sum(newLikelihoods) > SUM_GL_THRESH_NOCALL ) { - newGTs.add(GenotypeBuilder.create(g.getSampleName(), NO_CALL_ALLELES)); - } - else { - final GenotypeBuilder gb = new GenotypeBuilder(g); - - if ( newLikelihoods == null || numNewAltAlleles == 0 ) + if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) gb.noPL(); else gb.PL(newLikelihoods); - - // if we weren't asked to assign a genotype, then just no-call the sample - if ( !assignGenotypes || MathUtils.sum(newLikelihoods) > SUM_GL_THRESH_NOCALL ) { - gb.alleles(NO_CALL_ALLELES); - } - else { - // find the genotype with maximum likelihoods - int PLindex = numNewAltAlleles == 0 ? 0 : MathUtils.maxElementIndex(newLikelihoods); - GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex); - - gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2))); - if ( numNewAltAlleles != 0 ) gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods)); - } - newGTs.add(gb.make()); } + + updateGenotypeAfterSubsetting(g.getAlleles(), gb, assignGenotypes, newLikelihoods, allelesToUse); + newGTs.add(gb.make()); + } + + return newGTs; + } + + private static boolean likelihoodsAreUninformative(final double[] likelihoods) { + return MathUtils.sum(likelihoods) > SUM_GL_THRESH_NOCALL; + } + + /** + * Add the genotype call (GT) field to GenotypeBuilder using the requested algorithm assignmentMethod + * + * @param originalGT the original genotype calls, cannot be null + * @param gb the builder where we should put our newly called alleles, cannot be null + * @param assignmentMethod the method to use to do the assignment, cannot be null + * @param newLikelihoods a vector of likelihoods to use if the method requires PLs, should be log10 likelihoods, cannot be null + * @param allelesToUse the alleles we are using 
for our subsetting + */ + protected static void updateGenotypeAfterSubsetting(final List originalGT, + final GenotypeBuilder gb, + final GenotypeAssignmentMethod assignmentMethod, + final double[] newLikelihoods, + final List allelesToUse) { + gb.noAD(); + switch ( assignmentMethod ) { + case SET_TO_NO_CALL: + gb.alleles(NO_CALL_ALLELES); + gb.noGQ(); + break; + case USE_PLS_TO_ASSIGN: + if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) { + // if there is no mass on the (new) likelihoods, then just no-call the sample + gb.alleles(NO_CALL_ALLELES); + gb.noGQ(); + } else { + // find the genotype with maximum likelihoods + final int PLindex = MathUtils.maxElementIndex(newLikelihoods); + GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex); + gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2))); + gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods)); + } + break; + case BEST_MATCH_TO_ORIGINAL: + final List best = new LinkedList<>(); + final Allele ref = allelesToUse.get(0); // WARNING -- should be checked in input argument + for ( final Allele originalAllele : originalGT ) { + best.add(allelesToUse.contains(originalAllele) ? 
originalAllele : ref); + } + gb.noGQ(); + gb.noPL(); + gb.alleles(best); + break; + } + } + + /** + * Subset the samples in VC to reference only information with ref call alleles + * + * Preserves DP if present + * + * @param vc the variant context to subset down to + * @param ploidy ploidy to use if a genotype doesn't have any alleles + * @return a GenotypesContext + */ + public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) { + if ( vc == null ) throw new IllegalArgumentException("vc cannot be null"); + if ( ploidy < 1 ) throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy); + + // the genotypes with PLs + final GenotypesContext oldGTs = vc.getGenotypes(); + + // optimization: if no input genotypes, just exit + if (oldGTs.isEmpty()) return oldGTs; + + // the new genotypes to create + final GenotypesContext newGTs = GenotypesContext.create(); + + final Allele ref = vc.getReference(); + final List diploidRefAlleles = Arrays.asList(ref, ref); + + // create the new genotypes + for ( final Genotype g : vc.getGenotypes() ) { + final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy(); + final List refAlleles = gPloidy == 2 ? 
diploidRefAlleles : Collections.nCopies(gPloidy, ref); + final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles); + if ( g.hasDP() ) gb.DP(g.getDP()); + if ( g.hasGQ() ) gb.GQ(g.getGQ()); + newGTs.add(gb.make()); } return newGTs; @@ -539,7 +643,7 @@ public class GATKVariantContextUtils { * @return genotypes context */ public static GenotypesContext assignDiploidGenotypes(final VariantContext vc) { - return subsetDiploidAlleles(vc, vc.getAlleles(), true); + return subsetDiploidAlleles(vc, vc.getAlleles(), GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN); } /** @@ -557,7 +661,7 @@ public class GATKVariantContextUtils { * @return a list of bi-allelic (or monomorphic) variant context */ public static List splitVariantContextToBiallelics(final VariantContext vc) { - return splitVariantContextToBiallelics(vc, false); + return splitVariantContextToBiallelics(vc, false, GenotypeAssignmentMethod.SET_TO_NO_CALL); } /** @@ -575,18 +679,18 @@ public class GATKVariantContextUtils { * @param trimLeft if true, we will also left trim alleles, potentially moving the resulting vcs forward on the genome * @return a list of bi-allelic (or monomorphic) variant context */ - public static List splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft) { + public static List splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft, final GenotypeAssignmentMethod genotypeAssignmentMethod) { if ( ! 
vc.isVariant() || vc.isBiallelic() ) // non variant or biallelics already satisfy the contract return Collections.singletonList(vc); else { - final List biallelics = new LinkedList(); + final List biallelics = new LinkedList<>(); for ( final Allele alt : vc.getAlternateAlleles() ) { VariantContextBuilder builder = new VariantContextBuilder(vc); final List alleles = Arrays.asList(vc.getReference(), alt); builder.alleles(alleles); - builder.genotypes(subsetDiploidAlleles(vc, alleles, false)); + builder.genotypes(subsetDiploidAlleles(vc, alleles, genotypeAssignmentMethod)); VariantContextUtils.calculateChromosomeCounts(builder, true); final VariantContext trimmed = trimAlleles(builder.make(), trimLeft, true); biallelics.add(trimmed); diff --git a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java index fcc7c7998..937698d82 100644 --- a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java @@ -28,6 +28,7 @@ package org.broadinstitute.sting.utils.variant; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.variant.variantcontext.*; @@ -39,6 +40,7 @@ import org.testng.annotations.Test; import java.util.*; public class GATKVariantContextUtilsUnitTest extends BaseTest { + private final static boolean DEBUG = false; Allele Aref, T, C, G, Cref, ATC, ATCATC; @@ -168,7 +170,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeAllelesTest.getTests(MergeAllelesTest.class); } - @Test(dataProvider = 
"mergeAlleles") + @Test(enabled = !DEBUG, dataProvider = "mergeAlleles") public void testMergeAlleles(MergeAllelesTest cfg) { final List inputs = new ArrayList(); @@ -229,7 +231,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return SimpleMergeRSIDTest.getTests(SimpleMergeRSIDTest.class); } - @Test(dataProvider = "simplemergersiddata") + @Test(enabled = !DEBUG, dataProvider = "simplemergersiddata") public void testRSIDMerge(SimpleMergeRSIDTest cfg) { VariantContext snpVC1 = makeVC("snpvc1", Arrays.asList(Aref, T)); final List inputs = new ArrayList(); @@ -352,7 +354,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeFilteredTest.getTests(MergeFilteredTest.class); } - @Test(dataProvider = "mergeFiltered") + @Test(enabled = !DEBUG, dataProvider = "mergeFiltered") public void testMergeFiltered(MergeFilteredTest cfg) { final List priority = vcs2priority(cfg.inputs); final VariantContext merged = GATKVariantContextUtils.simpleMerge( @@ -479,7 +481,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeGenotypesTest.getTests(MergeGenotypesTest.class); } - @Test(dataProvider = "mergeGenotypes") + @Test(enabled = !DEBUG, dataProvider = "mergeGenotypes") public void testMergeGenotypes(MergeGenotypesTest cfg) { final VariantContext merged = GATKVariantContextUtils.simpleMerge( cfg.inputs, cfg.priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED, @@ -517,7 +519,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - @Test + @Test(enabled = !DEBUG) public void testMergeGenotypesUniquify() { final VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)); final VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)); @@ -547,7 +549,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { // // -------------------------------------------------------------------------------- - @Test + 
@Test(enabled = !DEBUG) public void testAnnotationSet() { for ( final boolean annotate : Arrays.asList(true, false)) { for ( final String set : Arrays.asList("set", "combine", "x")) { @@ -618,7 +620,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return ReverseClippingPositionTestProvider.getTests(ReverseClippingPositionTestProvider.class); } - @Test(dataProvider = "ReverseClippingPositionTestProvider") + @Test(enabled = !DEBUG, dataProvider = "ReverseClippingPositionTestProvider") public void testReverseClippingPositionTestProvider(ReverseClippingPositionTestProvider cfg) { int result = GATKVariantContextUtils.computeReverseClipping(cfg.alleles, cfg.ref.getBytes()); Assert.assertEquals(result, cfg.expectedClip); @@ -706,7 +708,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "SplitBiallelics") + @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics") public void testSplitBiallelicsNoGenotypes(final VariantContext vc, final List expectedBiallelics) { final List biallelics = GATKVariantContextUtils.splitVariantContextToBiallelics(vc); Assert.assertEquals(biallelics.size(), expectedBiallelics.size()); @@ -717,7 +719,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - @Test(dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes") + @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes") public void testSplitBiallelicsGenotypes(final VariantContext vc, final List expectedBiallelics) { final List genotypes = new ArrayList(); @@ -745,7 +747,6 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - // -------------------------------------------------------------------------------- // // Test repeats @@ -810,14 +811,14 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return 
RepeatDetectorTest.getTests(RepeatDetectorTest.class); } - @Test(dataProvider = "RepeatDetectorTest") + @Test(enabled = !DEBUG, dataProvider = "RepeatDetectorTest") public void testRepeatDetectorTest(RepeatDetectorTest cfg) { // test alleles are equal Assert.assertEquals(GATKVariantContextUtils.isTandemRepeat(cfg.vc, cfg.ref.getBytes()), cfg.isTrueRepeat); } - @Test + @Test(enabled = !DEBUG) public void testRepeatAllele() { Allele nullR = Allele.create("A", true); Allele nullA = Allele.create("A", false); @@ -940,7 +941,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ForwardClippingData") + @Test(enabled = !DEBUG, dataProvider = "ForwardClippingData") public void testForwardClipping(final List alleleStrings, final int expectedClip) { final List alleles = new LinkedList(); for ( final String alleleString : alleleStrings ) @@ -975,7 +976,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ClipAlleleTest") + @Test(enabled = !DEBUG, dataProvider = "ClipAlleleTest") public void testClipAlleles(final List alleleStrings, final List expected, final int numLeftClipped) { final int start = 10; final VariantContext unclipped = GATKVariantContextUtils.makeFromAlleles("test", "20", start, alleleStrings); @@ -1019,7 +1020,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "PrimitiveAlleleSplittingData") + @Test(enabled = !DEBUG, dataProvider = "PrimitiveAlleleSplittingData") public void testPrimitiveAlleleSplitting(final String ref, final String alt, final int expectedSplit, final List variantPositions) { final int start = 10; @@ -1066,7 +1067,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "AlleleRemappingData") + @Test(enabled = !DEBUG, 
dataProvider = "AlleleRemappingData") public void testAlleleRemapping(final Map alleleMap, final int numGenotypes) { final GATKVariantContextUtils.AlleleMapper alleleMapper = new GATKVariantContextUtils.AlleleMapper(alleleMap); @@ -1102,4 +1103,204 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return gc; } + + // -------------------------------------------------------------------------------- + // + // Test subsetDiploidAlleles + // + // -------------------------------------------------------------------------------- + + @DataProvider(name = "subsetDiploidAllelesData") + public Object[][] makesubsetDiploidAllelesData() { + List tests = new ArrayList<>(); + + final Allele A = Allele.create("A", true); + final Allele C = Allele.create("C"); + final Allele G = Allele.create("G"); + + final List AA = Arrays.asList(A,A); + final List AC = Arrays.asList(A,C); + final List CC = Arrays.asList(C,C); + final List AG = Arrays.asList(A,G); + final List CG = Arrays.asList(C,G); + final List GG = Arrays.asList(G,G); + final List ACG = Arrays.asList(A,C,G); + + final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make(); + + final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01}); + final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01}); + final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9}); + final double[] uninformative = new double[]{0, 0, 0}; + + final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(50).make(); + + // make sure we don't screw up the simple case + final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).GQ(8).make(); + final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10,2}).PL(hetPL).GQ(8).make(); + final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10,2}).PL(homVarPL).GQ(8).make(); + + tests.add(new Object[]{new 
VariantContextBuilder(vcBase).genotypes(aaGT).make(), AC, Arrays.asList(new GenotypeBuilder(aaGT).noAD().make())}); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(acGT).make(), AC, Arrays.asList(new GenotypeBuilder(acGT).noAD().make())}); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), AC, Arrays.asList(new GenotypeBuilder(ccGT).noAD().make())}); + + // uninformative test case + final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).noAD().PL(uninformative).GQ(0).make(); + final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noAD().noPL().noGQ().make(); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), AC, Arrays.asList(emptyGT)}); + + // actually subsetting down from multiple alt values + final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50}; + final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50}; + final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50}; + final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50}; + final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG + final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0}; // AA, AC, CC, AG, CG, GG + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homRef3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -10, -20}).noAD().GQ(100).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetRefC3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(AC).PL(new double[]{-10, 0, -20}).noAD().GQ(100).make())}); + + tests.add(new Object[]{ + new 
VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homC3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(CC).PL(new double[]{-20, -10, 0}).noAD().GQ(100).make())}); + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetRefG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(AG).PL(new double[]{-20, 0, -50}).noAD().GQ(200).make())}); + + // wow, scary -- bad output but discussed with Eric and we think this is the only thing that can be done + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetCG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).noAD().GQ(200).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(GG).PL(new double[]{-20, -40, 0}).noAD().GQ(200).make())}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "subsetDiploidAllelesData") + public void testsubsetDiploidAllelesData(final VariantContext inputVC, + final List allelesToUse, + final List expectedGenotypes) { + final GenotypesContext actual = GATKVariantContextUtils.subsetDiploidAlleles(inputVC, allelesToUse, GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN); + + Assert.assertEquals(actual.size(), expectedGenotypes.size()); + for ( final Genotype expected : expectedGenotypes ) { + final Genotype actualGT = actual.get(expected.getSampleName()); + Assert.assertNotNull(actualGT); + assertGenotypesAreEqual(actualGT, expected); + } + } + + @DataProvider(name = "UpdateGenotypeAfterSubsettingData") + public Object[][] 
makeUpdateGenotypeAfterSubsettingData() { + List tests = new ArrayList(); + + final Allele A = Allele.create("A", true); + final Allele C = Allele.create("C"); + final Allele G = Allele.create("G"); + + final List AA = Arrays.asList(A,A); + final List AC = Arrays.asList(A,C); + final List CC = Arrays.asList(C,C); + final List AG = Arrays.asList(A,G); + final List CG = Arrays.asList(C,G); + final List GG = Arrays.asList(G,G); + final List ACG = Arrays.asList(A,C,G); + final List> allSubsetAlleles = Arrays.asList(AC,AG,ACG); + + final double[] homRefPL = new double[]{0.9, 0.09, 0.01}; + final double[] hetPL = new double[]{0.09, 0.9, 0.01}; + final double[] homVarPL = new double[]{0.01, 0.09, 0.9}; + final double[] uninformative = new double[]{0.33, 0.33, 0.33}; + final List allPLs = Arrays.asList(homRefPL, hetPL, homVarPL, uninformative); + + for ( final List alleles : allSubsetAlleles ) { + for ( final double[] pls : allPLs ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL, pls, AA, alleles, GATKVariantContextUtils.NO_CALL_ALLELES}); + } + } + + for ( final List originalGT : Arrays.asList(AA, AC, CC, AG, CG, GG) ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homRefPL, originalGT, AC, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, hetPL, originalGT, AC, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homVarPL, originalGT, AC, CC}); +// tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, uninformative, AA, AC, GATKVariantContextUtils.NO_CALL_ALLELES}); + } + + for ( final double[] pls : allPLs ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AC, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AC, AC}); + 
tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AC, CC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AC, AC}); + + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AG, AG}); + + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, ACG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, ACG, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, ACG, CC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AG, ACG, AG}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, ACG, CG}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, GG, ACG, GG}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = !DEBUG, dataProvider = "UpdateGenotypeAfterSubsettingData") + public void testUpdateGenotypeAfterSubsetting(final GATKVariantContextUtils.GenotypeAssignmentMethod mode, + final double[] likelihoods, + final List originalGT, + final List allelesToUse, + final List expectedAlleles) { + final GenotypeBuilder gb = new GenotypeBuilder("test"); + final double[] log10Likelhoods = MathUtils.normalizeFromLog10(likelihoods, true, false); + GATKVariantContextUtils.updateGenotypeAfterSubsetting(originalGT, gb, mode, 
log10Likelhoods, allelesToUse); + final Genotype g = gb.make(); + Assert.assertEquals(new HashSet<>(g.getAlleles()), new HashSet<>(expectedAlleles)); + } + + @Test(enabled = !DEBUG) + public void testSubsetToRef() { + final Map tests = new LinkedHashMap<>(); + + for ( final List alleles : Arrays.asList(Arrays.asList(Aref), Arrays.asList(C), Arrays.asList(Aref, C), Arrays.asList(Aref, C, C) ) ) { + for ( final String name : Arrays.asList("test1", "test2") ) { + final GenotypeBuilder builder = new GenotypeBuilder(name, alleles); + builder.DP(10); + builder.GQ(30); + builder.AD(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1, 2} : new int[]{1, 2, 3})); + builder.PL(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1,2} : new int[]{1,2,3})); + final List refs = Collections.nCopies(alleles.size(), Aref); + tests.put(builder.make(), builder.alleles(refs).noAD().noPL().make()); + } + } + + for ( final int n : Arrays.asList(1, 2, 3) ) { + for ( final List genotypes : Utils.makePermutations(new ArrayList<>(tests.keySet()), n, false) ) { + final VariantContext vc = new VariantContextBuilder("test", "20", 1, 1, Arrays.asList(Aref, C)).genotypes(genotypes).make(); + final GenotypesContext gc = GATKVariantContextUtils.subsetToRefOnly(vc, 2); + + Assert.assertEquals(gc.size(), genotypes.size()); + for ( int i = 0; i < genotypes.size(); i++ ) { +// logger.warn("Testing " + genotypes.get(i) + " => " + gc.get(i) + " " + tests.get(genotypes.get(i))); + assertGenotypesAreEqual(gc.get(i), tests.get(genotypes.get(i))); + } + } + } + } } \ No newline at end of file From 6232db3157acdc3a27ba7a21401d605010baf19a Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 13 Jun 2013 15:18:28 -0400 Subject: [PATCH 58/99] Remove STANDARD option from GATKRunReport -- AWS is now the default. Removed old code the referred to the STANDARD type. Deleted unused variables and functions. 
--- .../arguments/GATKArgumentCollection.java | 4 +- .../sting/gatk/phonehome/GATKRunReport.java | 71 +------------------ .../org/broadinstitute/sting/WalkerTest.java | 2 +- 3 files changed, 5 insertions(+), 72 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java index dc3d67283..0b1f341f0 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java @@ -69,8 +69,8 @@ public class GATKArgumentCollection { // // -------------------------------------------------------------------------------------------------------------- - @Argument(fullName = "phone_home", shortName = "et", doc="What kind of GATK run report should we generate? STANDARD is the default, can be NO_ET so nothing is posted to the run repository. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) - public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.STANDARD; + @Argument(fullName = "phone_home", shortName = "et", doc="What kind of GATK run report should we generate? AWS is the default, can be NO_ET so nothing is posted to the run repository. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) + public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.AWS; @Argument(fullName = "gatk_key", shortName = "K", doc="GATK Key file. Required if running with -et NO_ET. 
Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) public File gatkKeyFile = null; diff --git a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java index 9704454c9..67d72189c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java +++ b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java @@ -78,22 +78,6 @@ public class GATKRunReport { private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH.mm.ss"); - /** - * The root file system directory where we keep common report data - */ - private final static File REPORT_DIR = new File("/humgen/gsa-hpprojects/GATK/reports"); - - /** - * The full path to the direct where submitted (and uncharacterized) report files are written - */ - private final static File REPORT_SUBMIT_DIR = new File(REPORT_DIR.getAbsolutePath() + "/submitted"); - - /** - * Full path to the sentinel file that controls whether reports are written out. If this file doesn't - * exist, no long will be written - */ - private final static File REPORT_SENTINEL = new File(REPORT_DIR.getAbsolutePath() + "/ENABLE"); - /** * our log */ @@ -181,8 +165,6 @@ public class GATKRunReport { public enum PhoneHomeOption { /** Disable phone home */ NO_ET, - /** Standard option. Writes to local repository if it can be found, or S3 otherwise */ - STANDARD, /** Forces the report to go to S3 */ AWS, /** Force output to STDOUT. 
For debugging only */ @@ -365,14 +347,9 @@ public class GATKRunReport { switch (type) { case NO_ET: // don't do anything return false; - case STANDARD: case AWS: - if ( type == PhoneHomeOption.STANDARD && repositoryIsOnline() ) { - return postReportToLocalDisk(getLocalReportFullPath()) != null; - } else { - wentToAWS = true; - return postReportToAWSS3() != null; - } + wentToAWS = true; + return postReportToAWSS3() != null; case STDOUT: return postReportToStream(System.out); default: @@ -404,50 +381,6 @@ public class GATKRunReport { } } - /** - * Get the full path as a file where we'll write this report to local disl - * @return a non-null File - */ - @Ensures("result != null") - protected File getLocalReportFullPath() { - return new File(REPORT_SUBMIT_DIR, getReportFileName()); - } - - /** - * Is the local GATKRunReport repository available for writing reports? - * - * @return true if and only if the common run report repository is available and online to receive reports - */ - private boolean repositoryIsOnline() { - return false; // REPORT_SENTINEL.exists(); - } - - - /** - * Main entry point to writing reports to disk. Posts the XML report to the common GATK run report repository. - * If this process fails for any reason, all exceptions are handled and this routine merely prints a warning. - * That is, postReport() is guarenteed not to fail for any reason. 
- * - * @return the path where the file was written, or null if any failure occurred - */ - @Requires("destination != null") - private File postReportToLocalDisk(final File destination) { - try { - final BufferedOutputStream out = new BufferedOutputStream( - new GZIPOutputStream( - new FileOutputStream(destination))); - postReportToStream(out); - out.close(); - logger.debug("Wrote report to " + destination); - return destination; - } catch ( Exception e ) { - // we catch everything, and no matter what eat the error - exceptDuringRunReport("Couldn't read report file", e); - destination.delete(); - return null; - } - } - // --------------------------------------------------------------------------- // // Code for sending reports to s3 diff --git a/public/java/test/org/broadinstitute/sting/WalkerTest.java b/public/java/test/org/broadinstitute/sting/WalkerTest.java index 40f1f7bcd..422ddbfb0 100644 --- a/public/java/test/org/broadinstitute/sting/WalkerTest.java +++ b/public/java/test/org/broadinstitute/sting/WalkerTest.java @@ -220,7 +220,7 @@ public class WalkerTest extends BaseTest { String args = this.args; if ( includeImplicitArgs ) { args = args + (ENABLE_PHONE_HOME_FOR_TESTS ? 
- String.format(" -et %s ", GATKRunReport.PhoneHomeOption.STANDARD) : + String.format(" -et %s ", GATKRunReport.PhoneHomeOption.AWS) : String.format(" -et %s -K %s ", GATKRunReport.PhoneHomeOption.NO_ET, gatkKeyFile)); if ( includeShadowBCF && GENERATE_SHADOW_BCF ) args = args + " --generateShadowBCF "; From 74f311c973820e97c8da36a77095c63e3d098454 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 13 Jun 2013 15:46:16 -0400 Subject: [PATCH 59/99] Emit the GATK version number in the VCF header -- Looks like ##GATKVersion=2.5-159-g3f91d93 in the VCF header line -- delivers [#51595305] --- .../io/stubs/VariantContextWriterStub.java | 13 ++++++++++++ .../gatk/EngineFeaturesIntegrationTest.java | 21 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java b/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java index 5c80da214..8b7c4282b 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java +++ b/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.io.stubs; import net.sf.samtools.SAMSequenceDictionary; import org.broadinstitute.sting.gatk.CommandLineExecutable; +import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.io.OutputTracker; import org.broadinstitute.sting.utils.classloader.JVMUtils; @@ -53,6 +54,7 @@ import java.util.List; * @version 0.1 */ public class VariantContextWriterStub implements Stub, VariantContextWriter { + public final static String GATK_VERSION_KEY = "GATKVersion"; public final static boolean UPDATE_CONTIG_HEADERS = true; /** @@ -225,6 +227,9 @@ public class VariantContextWriterStub implements Stub, Var if ( header.isWriteEngineHeaders() ) { // skip writing the command line header if requested 
if ( ! skipWritingCommandLineHeader && header.isWriteCommandLine() ) { + // write the GATK version if we have command line information enabled + vcfHeader.addMetaDataLine(getGATKVersionHeaderLine()); + // Check for the command-line argument header line. If not present, add it in. final VCFHeaderLine commandLineArgHeaderLine = getCommandLineArgumentHeaderLine(); final boolean foundCommandLineHeaderLine = vcfHeader.getMetaDataLine(commandLineArgHeaderLine.getKey()) != null; @@ -284,4 +289,12 @@ public class VariantContextWriterStub implements Stub, Var CommandLineExecutable executable = JVMUtils.getObjectOfType(argumentSources,CommandLineExecutable.class); return new VCFHeaderLine(executable.getAnalysisName(), "\"" + engine.createApproximateCommandLineArgumentString(argumentSources.toArray()) + "\""); } + + /** + * Gets the GATK version header line for the VCF file + * @return non-null VCFHeaderLine. + */ + private VCFHeaderLine getGATKVersionHeaderLine() { + return new VCFHeaderLine(GATK_VERSION_KEY, CommandLineGATK.getVersionNumber()); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index b5b82f869..226224199 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -25,10 +25,12 @@ package org.broadinstitute.sting.gatk; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter; +import org.broadinstitute.sting.gatk.io.stubs.VariantContextWriterStub; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.ReadFilters; import 
org.broadinstitute.sting.gatk.walkers.ReadWalker; @@ -36,9 +38,15 @@ import org.broadinstitute.sting.gatk.walkers.qc.ErrorThrowing; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.variant.vcf.VCFCodec; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.io.File; +import java.io.FileInputStream; import java.io.PrintStream; import java.util.Arrays; @@ -191,4 +199,17 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { 1, UserException.class); executeTest("badCompress " + compress, spec); } + + @Test(enabled = true) + public void testGATKVersionInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" + + " -o %s -L 20:10,000,000", + 1, Arrays.asList("")); + final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + final VCFHeaderLine versionLine = header.getMetaDataLine(VariantContextWriterStub.GATK_VERSION_KEY); + Assert.assertNotNull(versionLine); + Assert.assertEquals(versionLine.getValue(), CommandLineGATK.getVersionNumber()); + } } \ No newline at end of file From f9c986be7448943f11d5d18083b5b1a2fa77acd2 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Thu, 13 Jun 2013 15:30:10 -0400 Subject: [PATCH 60/99] Remove com.sun.javadoc.* dependencies from the GATK proper, and isolate them for doclet use only Problem: Classes in com.sun.javadoc.* are non-standard. 
Since we can't depend on their availability for all users, the GATK proper should not have any runtime dependencies on this package. Solution: -Isolate com.sun.javadoc.* dependencies in a DocletUtils class for use only by doclets. The only users who need to run our doclets are those who compile from source, and they should be competent enough to figure out how to resolve a missing com.sun.* dependency. -HelpUtils now contains no com.sun.javadoc.* dependencies and can be safely used by walkers/other tools. -Added comments with instructions on when it is safe to use DocletUtils vs. HelpUtils [delivers #51450385] [delivers #50387199] --- .../sting/utils/help/DocletUtils.java | 76 +++++++++++++++++++ .../sting/utils/help/GATKDoclet.java | 2 +- .../help/GenericDocumentationHandler.java | 6 +- .../sting/utils/help/HelpUtils.java | 48 ++---------- .../help/ResourceBundleExtractorDoclet.java | 4 +- .../sting/utils/runtime/ProcessSettings.java | 1 - 6 files changed, 87 insertions(+), 50 deletions(-) create mode 100644 public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java diff --git a/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java b/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java new file mode 100644 index 000000000..1e9a37cb7 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java @@ -0,0 +1,76 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all 
copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.help; + +import com.sun.javadoc.FieldDoc; +import com.sun.javadoc.PackageDoc; +import com.sun.javadoc.ProgramElementDoc; +import org.broadinstitute.sting.utils.classloader.JVMUtils; + +import java.lang.reflect.Field; + +/** + * Methods in the class must ONLY be used by doclets, since the com.sun.javadoc.* classes are not + * available on all systems, and we don't want the GATK proper to depend on them. + */ +public class DocletUtils { + + protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) { + try { + Class type = getClassForDoc(classDoc); + return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type)); + } catch (Throwable t) { + // Ignore errors. + return false; + } + } + + protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException { + return Class.forName(getClassName(doc)); + } + + protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) { + try { + Class clazz = getClassForDoc(fieldDoc.containingClass()); + return JVMUtils.findField(clazz, fieldDoc.name()); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + /** + * Reconstitute the class name from the given class JavaDoc object. + * + * @param doc the Javadoc model for the given class. + * @return The (string) class name of the given class. 
+ */ + protected static String getClassName(ProgramElementDoc doc) { + PackageDoc containingPackage = doc.containingPackage(); + return containingPackage.name().length() > 0 ? + String.format("%s.%s", containingPackage.name(), doc.name()) : + String.format("%s", doc.name()); + } +} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java b/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java index 677bbf2e5..63cb0900a 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java @@ -352,7 +352,7 @@ public class GATKDoclet { private Class getClassForClassDoc(ClassDoc doc) { try { // todo -- what do I need the ? extends Object to pass the compiler? - return (Class) HelpUtils.getClassForDoc(doc); + return (Class) DocletUtils.getClassForDoc(doc); } catch (ClassNotFoundException e) { //logger.warn("Couldn't find class for ClassDoc " + doc); // we got a classdoc for a class we can't find. 
Maybe in a library or something diff --git a/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java b/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java index 1711a3923..02c269495 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java @@ -68,7 +68,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { @Override public boolean includeInDocs(ClassDoc doc) { try { - Class type = HelpUtils.getClassForDoc(doc); + Class type = DocletUtils.getClassForDoc(doc); boolean hidden = !getDoclet().showHiddenFeatures() && type.isAnnotationPresent(Hidden.class); return !hidden && JVMUtils.isConcrete(type); } catch (ClassNotFoundException e) { @@ -157,7 +157,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { root.put("arguments", args); try { // loop over all of the arguments according to the parsing engine - for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(HelpUtils.getClassForDoc(toProcess.classDoc))) { + for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(DocletUtils.getClassForDoc(toProcess.classDoc))) { // todo -- why can you have multiple ones? 
ArgumentDefinition argDef = argumentSource.createArgumentDefinitions().get(0); FieldDoc fieldDoc = getFieldDoc(toProcess.classDoc, argumentSource.field.getName()); @@ -663,7 +663,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { if (fieldDoc.name().equals(name)) return fieldDoc; - Field field = HelpUtils.getFieldForFieldDoc(fieldDoc); + Field field = DocletUtils.getFieldForFieldDoc(fieldDoc); if (field == null) throw new RuntimeException("Could not find the field corresponding to " + fieldDoc + ", presumably because the field is inaccessible"); if (field.isAnnotationPresent(ArgumentCollection.class)) { diff --git a/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java b/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java index 9a23fd022..74516672d 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java @@ -25,57 +25,20 @@ package org.broadinstitute.sting.utils.help; -import com.sun.javadoc.FieldDoc; -import com.sun.javadoc.PackageDoc; -import com.sun.javadoc.ProgramElementDoc; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotationType; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.GenotypeAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.classloader.JVMUtils; import org.broadinstitute.sting.utils.classloader.PluginManager; -import java.lang.reflect.Field; import java.util.List; +/** + * NON-javadoc/doclet help-related utility methods should go here. Anything with a com.sun.javadoc.* dependency + * should go into DocletUtils for use only by doclets. 
+ */ public class HelpUtils { - protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) { - try { - Class type = getClassForDoc(classDoc); - return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type)); - } catch (Throwable t) { - // Ignore errors. - return false; - } - } - - protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException { - return Class.forName(getClassName(doc)); - } - - protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) { - try { - Class clazz = getClassForDoc(fieldDoc.containingClass()); - return JVMUtils.findField(clazz, fieldDoc.name()); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - - /** - * Reconstitute the class name from the given class JavaDoc object. - * - * @param doc the Javadoc model for the given class. - * @return The (string) class name of the given class. - */ - protected static String getClassName(ProgramElementDoc doc) { - PackageDoc containingPackage = doc.containingPackage(); - return containingPackage.name().length() > 0 ? - String.format("%s.%s", containingPackage.name(), doc.name()) : - String.format("%s", doc.name()); - } - /** * Simple method to print a list of available annotations. 
*/ @@ -98,5 +61,4 @@ public class HelpUtils { System.out.println("\t" + c.getSimpleName()); System.out.println(); } - -} \ No newline at end of file +} diff --git a/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java b/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java index 0f2383b4b..ac85d7aff 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java @@ -108,7 +108,7 @@ public class ResourceBundleExtractorDoclet { if(isRequiredJavadocMissing(currentClass) && isWalker(currentClass)) undocumentedWalkers.add(currentClass.name()); - renderHelpText(HelpUtils.getClassName(currentClass),currentClass); + renderHelpText(DocletUtils.getClassName(currentClass),currentClass); } for(PackageDoc currentPackage: packages) @@ -173,7 +173,7 @@ public class ResourceBundleExtractorDoclet { * @return True if the class of the given name is a walker. False otherwise. 
*/ protected static boolean isWalker(ClassDoc classDoc) { - return HelpUtils.assignableToClass(classDoc, Walker.class, true); + return DocletUtils.assignableToClass(classDoc, Walker.class, true); } /** diff --git a/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java b/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java index 8aafd6034..659523641 100644 --- a/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java +++ b/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java @@ -25,7 +25,6 @@ package org.broadinstitute.sting.utils.runtime; -import com.sun.corba.se.spi.orbutil.fsm.Input; import java.io.File; import java.util.Map; From d1672926881874f007b3ade7d613b17470e5261c Mon Sep 17 00:00:00 2001 From: David Roazen Date: Fri, 14 Jun 2013 15:30:17 -0400 Subject: [PATCH 63/99] Reduce number of leftover temp files in GATK runs -WalkerTest now deletes *.idx files on exit -ArtificialBAMBuilder now deletes *.bai files on exit -VariantsToBinaryPed walker now deletes its temp files on exit --- .../sting/gatk/walkers/variantutils/VariantsToBinaryPed.java | 1 + .../broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java | 5 +++++ public/java/test/org/broadinstitute/sting/WalkerTest.java | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java index 8d16e6ca2..c414b443e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java @@ -176,6 +176,7 @@ public class VariantsToBinaryPed extends RodWalker { // Cut down on memory. 
try { File temp = File.createTempFile("VariantsToBPed_"+sample, ".tmp"); + temp.deleteOnExit(); printMap.put(sample,new PrintStream(temp)); tempFiles.put(sample,temp); } catch (IOException e) { diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java index bf3045c71..8d496ab96 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java @@ -182,6 +182,11 @@ public class ArtificialBAMBuilder { try { final File file = File.createTempFile("tempBAM", ".bam"); file.deleteOnExit(); + + // Register the bam index file for deletion on exit as well: + new File(file.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit(); + new File(file.getAbsolutePath() + ".bai").deleteOnExit(); + return makeBAMFile(file); } catch ( IOException e ) { throw new RuntimeException(e); diff --git a/public/java/test/org/broadinstitute/sting/WalkerTest.java b/public/java/test/org/broadinstitute/sting/WalkerTest.java index 422ddbfb0..78f67967b 100644 --- a/public/java/test/org/broadinstitute/sting/WalkerTest.java +++ b/public/java/test/org/broadinstitute/sting/WalkerTest.java @@ -312,6 +312,10 @@ public class WalkerTest extends BaseTest { for (int i = 0; i < spec.nOutputFiles; i++) { String ext = spec.exts == null ? ".tmp" : "." 
+ spec.exts.get(i); File fl = createTempFile(String.format("walktest.tmp_param.%d", i), ext); + + // Mark corresponding *.idx for deletion on exit as well just in case an index is created for the temp file: + new File(fl.getAbsolutePath() + ".idx").deleteOnExit(); + tmpFiles.add(fl); } From 1677a0a458e83075e2b0c0e14d45f33d39690593 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 14 Jun 2013 15:56:13 -0400 Subject: [PATCH 64/99] Simpler FILTER and info field encoding for BeagleOutputToVCF -- Previous version created FILTERs for each possible alt allele when that site was set to monomorphic by BEAGLE. So if you had a A/C SNP in the original file and beagle thought it was AC=0, then you'd get a record with BGL_RM_WAS_A in the FILTER field. This obviously would cause problems for indels, as so the tool was blowing up in this case. Now beagle sets the filter field to BGL_SET_TO_MONOMORPHIC and sets the info field annotation OriginalAltAllele to A instead. This works in general with any type of allele. -- Here's an example output line from the previous and current versions: old: 20 64150 rs7274499 C . 3041.68 BGL_RM_WAS_A AN=566;DB;DP=1069;Dels=0.00;HRun=0;HaplotypeScore=238.33;LOD=3.5783;MQ=83.74;MQ0=0;NumGenotypesChanged=1;OQ=1949.35;QD=10.95;SB=-6918.88 new: 20 64062 . G . 
100.39 BGL_SET_TO_MONOMORPHIC AN=566;DP=1108;Dels=0.00;HRun=2;HaplotypeScore=221.59;LOD=-0.5051;MQ=85.69;MQ0=0;NumGenotypesChanged=1;OQ=189.66;OriginalAltAllele=A;QD=15.81;SB=-6087.15 -- update MD5s to reflect these changes -- [delivers #50847721] --- .../gatk/walkers/beagle/BeagleIntegrationTest.java | 4 ++-- .../gatk/walkers/beagle/BeagleOutputToVCF.java | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java index 69a5fc65f..5601d66fb 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java @@ -62,7 +62,7 @@ public class BeagleIntegrationTest extends WalkerTest { "--beagleR2:BEAGLE " + beagleValidationDataLocation + "inttestbgl.r2 " + "--beagleProbs:BEAGLE " + beagleValidationDataLocation + "inttestbgl.gprobs " + "--beaglePhased:BEAGLE " + beagleValidationDataLocation + "inttestbgl.phased " + - "-o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING", 1, Arrays.asList("c5522304abf0633041c7772dd7dafcea")); + "-o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING", 1, Arrays.asList("989449fa3e262b88ba126867fa3ad9fb")); spec.disableShadowBCF(); executeTest("test BeagleOutputToVCF", spec); } @@ -96,7 +96,7 @@ public class BeagleIntegrationTest extends WalkerTest { "--beagleR2:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.r2 "+ "--beagleProbs:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.gprobs.bgl "+ "--beaglePhased:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.phased.bgl "+ - "-L 20:1-70000 -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",1,Arrays.asList("d8906b67c7f9fdb5b37b8e9e050982d3")); + "-L 20:1-70000 -o %s 
--no_cmdline_in_header -U LENIENT_VCF_PROCESSING",1,Arrays.asList("e036636fcd6a748ede4a70ea47941d47")); spec.disableShadowBCF(); executeTest("testBeagleChangesSitesToRef",spec); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java index 15bd79586..7d5ad9b8a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java @@ -129,6 +129,9 @@ public class BeagleOutputToVCF extends RodWalker { private final double MIN_PROB_ERROR = 0.000001; private final double MAX_GENOTYPE_QUALITY = -6.0; + private final static String BEAGLE_MONO_FILTER_STRING = "BGL_SET_TO_MONOMORPHIC"; + private final static String ORIGINAL_ALT_ALLELE_INFO_KEY = "OriginalAltAllele"; + public void initialize() { // setup the header fields @@ -138,10 +141,8 @@ public class BeagleOutputToVCF extends RodWalker { hInfo.add(new VCFFormatHeaderLine("OG",1, VCFHeaderLineType.String, "Original Genotype input to Beagle")); hInfo.add(new VCFInfoHeaderLine("R2", 1, VCFHeaderLineType.Float, "r2 Value reported by Beagle on each site")); hInfo.add(new VCFInfoHeaderLine("NumGenotypesChanged", 1, VCFHeaderLineType.Integer, "The number of genotypes changed by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_A", "This 'A' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_C", "This 'C' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_G", "This 'G' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_T", "This 'T' site was set to monomorphic by Beagle")); + hInfo.add(new VCFInfoHeaderLine(ORIGINAL_ALT_ALLELE_INFO_KEY, 1, VCFHeaderLineType.String, "The original alt allele for a site set to monomorphic by Beagle")); + hInfo.add(new 
VCFFilterHeaderLine(BEAGLE_MONO_FILTER_STRING, "This site was set to monomorphic by Beagle")); if ( comp.isBound() ) { hInfo.add(new VCFInfoHeaderLine("ACH", 1, VCFHeaderLineType.Integer, "Allele Count from Comparison ROD at this site")); @@ -335,9 +336,8 @@ public class BeagleOutputToVCF extends RodWalker { final VariantContextBuilder builder = new VariantContextBuilder(vc_input).source("outputvcf").genotypes(genotypes); if ( ! ( beagleVarCounts > 0 || DONT_FILTER_MONOMORPHIC_SITES ) ) { - Set removedFilters = vc_input.filtersWereApplied() ? new HashSet(vc_input.getFilters()) : new HashSet(1); - removedFilters.add(String.format("BGL_RM_WAS_%s",vc_input.getAlternateAllele(0))); - builder.alleles(new HashSet(Arrays.asList(vc_input.getReference()))).filters(removedFilters); + builder.attribute(ORIGINAL_ALT_ALLELE_INFO_KEY, vc_input.getAlternateAllele(0)); + builder.alleles(Collections.singleton(vc_input.getReference())).filter(BEAGLE_MONO_FILTER_STRING); } // re-compute chromosome counts From f46f7d9b23d22ac249fddbfacc4e748b61940ac9 Mon Sep 17 00:00:00 2001 From: James Warren Date: Fri, 14 Jun 2013 14:25:16 -0700 Subject: [PATCH 65/99] deducing dictionary path should not use global find and replace Signed-off-by: David Roazen --- .../sting/gatk/datasources/reference/ReferenceDataSource.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java index 01edd44ba..edd3d324c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java @@ -68,8 +68,8 @@ public class ReferenceDataSource { final File indexFile = new File(fastaFile.getAbsolutePath() + ".fai"); // determine the name for the dict file - final String fastaExt = 
fastaFile.getAbsolutePath().endsWith("fa") ? ".fa" : ".fasta"; - final File dictFile = new File(fastaFile.getAbsolutePath().replace(fastaExt, ".dict")); + final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? "\\.fa$" : "\\.fasta$"; + final File dictFile = new File(fastaFile.getAbsolutePath().replaceAll(fastaExt, ".dict")); // It's an error if either the fai or dict file does not exist. The user is now responsible // for creating these files. From e48f7544785437d52d2149ac82d5750e9c6746ac Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Thu, 13 Jun 2013 19:29:08 -0400 Subject: [PATCH 67/99] Fixes to several of the annotations for reduced reads (and other issues). 1. Have the RMSMappingQuality annotation take into account the fact that reduced reads represent multiple reads. 2. The rank sume tests should not be using reduced reads (because they do not represent distinct observations). 3. Fixed a massive bug in the BaseQualityRankSumTest annotation! It was not using the base qualities but rather the read likelihoods?! Added a unit test for Rank Sum Tests to prove that the distributions are correctly getting assigned appropriate p-values. Also, and just as importantly, the test shows that using reduced reads in the rank sum tests skews the results and makes insignificant distributions look significant (so it can falsely cause the filtering of good sites). Also included in this commit is a massive refactor of the RankSumTest class as requested by the reviewer. 
--- .../annotator/BaseQualityRankSumTest.java | 42 +---- .../annotator/ClippingRankSumTest.java | 31 +--- .../gatk/walkers/annotator/FisherStrand.java | 16 +- .../annotator/MappingQualityRankSumTest.java | 42 +---- .../walkers/annotator/RMSMappingQuality.java | 55 +++---- .../gatk/walkers/annotator/RankSumTest.java | 139 +++++++++++----- .../walkers/annotator/ReadPosRankSumTest.java | 95 ++++------- .../walkers/annotator/RankSumUnitTest.java | 151 ++++++++++++++++++ .../VariantAnnotatorIntegrationTest.java | 6 +- ...perGeneralPloidySuite1IntegrationTest.java | 2 +- ...perGeneralPloidySuite2IntegrationTest.java | 2 +- ...dGenotyperIndelCallingIntegrationTest.java | 16 +- ...GenotyperNormalCallingIntegrationTest.java | 8 +- ...dGenotyperReducedReadsIntegrationTest.java | 6 +- ...lexAndSymbolicVariantsIntegrationTest.java | 6 +- .../HaplotypeCallerIntegrationTest.java | 18 +-- 16 files changed, 373 insertions(+), 262 deletions(-) create mode 100644 protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java index a3a9e50e9..534834d0e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java @@ -47,13 +47,11 @@ package org.broadinstitute.sting.gatk.walkers.annotator; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import 
org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -71,37 +69,11 @@ public class BaseQualityRankSumTest extends RankSumTest implements StandardAnnot public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("BaseQRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref base qualities")); } - protected void fillQualsFromPileup(final List allAlleles, final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, final List altQuals){ - - if (alleleLikelihoodMap == null) { - // use fast SNP-based version if we don't have per-read allele likelihoods - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) ) { - if ( allAlleles.get(0).equals(Allele.create(p.getBase(),true)) ) { - refQuals.add((double)p.getQual()); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)p.getQual()); - } - } - } - return; - } - - for (Map el : alleleLikelihoodMap.getLikelihoodMapValues()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el); - if (! 
a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add(-10.0*(double)el.get(a.getMostLikelyAllele())); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add(-10.0*(double)el.get(a.getMostLikelyAllele())); - - - } + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)read.getBaseQualities()[ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, refLoc, ReadUtils.ClippingTail.RIGHT_TAIL)]; } - + protected Double getElementForPileupElement(final PileupElement p) { + return (double)p.getQual(); + } } \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java index 366512119..68e983bb8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java @@ -46,14 +46,11 @@ package org.broadinstitute.sting.gatk.walkers.annotator; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -74,26 +71,12 @@ public class ClippingRankSumTest extends RankSumTest { public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ClippingRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. 
Ref number of hard clipped bases")); } - - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap likelihoodMap, final List refQuals, final List altQuals) { - // todo - only support non-pileup case for now, e.g. active-region based version - if (pileup != null || likelihoodMap == null) - return; - - for (Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet()) { - - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)AlignmentUtils.getNumHardClippedBases(el.getKey())); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)AlignmentUtils.getNumHardClippedBases(el.getKey())); - - } + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)AlignmentUtils.getNumHardClippedBases(read); } + protected Double getElementForPileupElement(final PileupElement p) { + // TODO - we only support the non-pileup case for now, e.g. an active-region based version + return null; + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java index 957eb1aba..876dbf039 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java @@ -300,7 +300,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat for ( Map.Entry sample : stratifiedContexts.entrySet() ) { for (PileupElement p : sample.getValue().getBasePileup()) { - if ( ! RankSumTest.isUsableBase(p, false) ) // ignore deletions + if ( ! 
isUsableBase(p) ) // ignore deletions and bad MQ continue; if ( p.getQual() < minQScoreToConsider || p.getMappingQual() < minQScoreToConsider ) @@ -313,6 +313,20 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat return table; } + /** + * Can the base in this pileup element be used in comparative tests? + * + * @param p the pileup element to consider + * + * @return true if this base is part of a meaningful read for comparison, false otherwise + */ + private static boolean isUsableBase(final PileupElement p) { + return !( p.isDeletion() || + p.getMappingQual() == 0 || + p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); + } + private static void updateTable(final int[][] table, final Allele allele, final GATKSAMRecord read, final Allele ref, final Allele alt, final int representativeCount) { final boolean matchesRef = allele.equals(ref, true); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java index 3873138a2..0ebb09961 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java @@ -47,14 +47,10 @@ package org.broadinstitute.sting.gatk.walkers.annotator; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import 
org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -73,35 +69,11 @@ public class MappingQualityRankSumTest extends RankSumTest implements StandardAn public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("MQRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities")); } - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap likelihoodMap, - final List refQuals, final List altQuals) { - - if (pileup != null && likelihoodMap == null) { - // old UG snp-only path through the annotations - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) ) { - if ( allAlleles.get(0).equals(Allele.create(p.getBase(), true)) ) { - refQuals.add((double)p.getMappingQual()); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)p.getMappingQual()); - } - } - } - return; - } - for (Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - // BUGBUG: There needs to be a comparable isUsableBase check here - if (! 
a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)el.getKey().getMappingQuality()); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)el.getKey().getMappingQuality()); - } + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)read.getMappingQuality(); } - } \ No newline at end of file + protected Double getElementForPileupElement(final PileupElement p) { + return (double)p.getRead().getMappingQuality(); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java index 18348162e..d9bc5966c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java @@ -56,6 +56,7 @@ import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnota import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.vcf.VCFConstants; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.variant.vcf.VCFStandardHeaderLines; @@ -77,55 +78,41 @@ public class RMSMappingQuality extends InfoFieldAnnotation implements StandardAn final Map stratifiedContexts, final VariantContext vc, final Map perReadAlleleLikelihoodMap ) { - int totalSize = 0, index = 0; - int qualities[]; - if (stratifiedContexts != null) { + + final List qualities = new ArrayList<>(); + if ( stratifiedContexts != null ) { if ( stratifiedContexts.size() == 0 ) return null; - for ( AlignmentContext context : stratifiedContexts.values() ) 
- totalSize += context.size(); - - qualities = new int[totalSize]; - - for ( Map.Entry sample : stratifiedContexts.entrySet() ) { - AlignmentContext context = sample.getValue(); - for (PileupElement p : context.getBasePileup() ) - index = fillMappingQualitiesFromPileupAndUpdateIndex(p.getRead(), index, qualities); + for ( final Map.Entry sample : stratifiedContexts.entrySet() ) { + final AlignmentContext context = sample.getValue(); + for ( final PileupElement p : context.getBasePileup() ) + fillMappingQualitiesFromPileup(p.getRead().getMappingQuality(), p.getRepresentativeCount(), qualities); } } else if (perReadAlleleLikelihoodMap != null) { if ( perReadAlleleLikelihoodMap.size() == 0 ) return null; - for ( PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) - totalSize += perReadLikelihoods.size(); - - qualities = new int[totalSize]; - for ( PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) { - for (GATKSAMRecord read : perReadLikelihoods.getStoredElements()) - index = fillMappingQualitiesFromPileupAndUpdateIndex(read, index, qualities); - - - } + for ( final PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) { + for ( final GATKSAMRecord read : perReadLikelihoods.getStoredElements() ) + fillMappingQualitiesFromPileup(read.getMappingQuality(), (read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1), qualities); + } } else return null; - - - double rms = MathUtils.rms(qualities); - Map map = new HashMap(); - map.put(getKeyNames().get(0), String.format("%.2f", rms)); - return map; + final double rms = MathUtils.rms(qualities); + return Collections.singletonMap(getKeyNames().get(0), (Object)String.format("%.2f", rms)); } - private static int fillMappingQualitiesFromPileupAndUpdateIndex(final GATKSAMRecord read, final int inputIdx, final int[] qualities) { - int outputIdx = inputIdx; - if ( read.getMappingQuality() != QualityUtils.MAPPING_QUALITY_UNAVAILABLE ) - qualities[outputIdx++] = read.getMappingQuality(); - - return outputIdx; + private static void fillMappingQualitiesFromPileup(final int mq, final int representativeCount, final List qualities) { + if ( mq != QualityUtils.MAPPING_QUALITY_UNAVAILABLE ) { + if ( representativeCount == 1 ) + qualities.add(mq); + else + qualities.addAll(Collections.nCopies(representativeCount, mq)); + } } public List getKeyNames() { return Arrays.asList(VCFConstants.RMS_MAPPING_QUALITY_KEY); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java index ef456824e..37508fc06 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java @@ -53,9 +53,11 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.ActiveRegionBasedAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotatorCompatible; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation; +import 
org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.MannWhitneyU; import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.pileup.PileupElement; @@ -87,31 +89,33 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR if (genotypes == null || genotypes.size() == 0) return null; - final ArrayList refQuals = new ArrayList(); - final ArrayList altQuals = new ArrayList(); + final ArrayList refQuals = new ArrayList<>(); + final ArrayList altQuals = new ArrayList<>(); for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) { - PerReadAlleleLikelihoodMap indelLikelihoodMap = null; - ReadBackedPileup pileup = null; + boolean usePileup = true; - if (stratifiedContexts != null) { // the old UG SNP-only path through the annotations - final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); - if ( context != null ) - pileup = context.getBasePileup(); + if ( stratifiedPerReadAlleleLikelihoodMap != null ) { + final PerReadAlleleLikelihoodMap likelihoodMap = stratifiedPerReadAlleleLikelihoodMap.get(genotype.getSampleName()); + if ( likelihoodMap != null && !likelihoodMap.isEmpty() ) { + fillQualsFromLikelihoodMap(vc.getAlleles(), vc.getStart(), likelihoodMap, refQuals, altQuals); + usePileup = false; + } } - if (stratifiedPerReadAlleleLikelihoodMap != null ) - indelLikelihoodMap = stratifiedPerReadAlleleLikelihoodMap.get(genotype.getSampleName()); - if (indelLikelihoodMap != null && indelLikelihoodMap.isEmpty()) - indelLikelihoodMap = null; - // treat an empty likelihood map as a null reference - will simplify contract with fillQualsFromPileup - if (indelLikelihoodMap == null && pileup == null) - 
continue; - - fillQualsFromPileup(vc.getAlleles(), vc.getStart(), pileup, indelLikelihoodMap, refQuals, altQuals ); + // the old UG SNP-only path through the annotations + if ( usePileup && stratifiedContexts != null ) { + final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); + if ( context != null ) { + final ReadBackedPileup pileup = context.getBasePileup(); + if ( pileup != null ) + fillQualsFromPileup(vc.getAlleles(), pileup, refQuals, altQuals); + } + } } - if (refQuals.isEmpty() && altQuals.isEmpty()) + + if ( refQuals.isEmpty() && altQuals.isEmpty() ) return null; final MannWhitneyU mannWhitneyU = new MannWhitneyU(useDithering); @@ -136,18 +140,72 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR // we are testing that set1 (the alt bases) have lower quality scores than set2 (the ref bases) final Pair testResults = mannWhitneyU.runOneSidedTest(MannWhitneyU.USet.SET1); - final Map map = new HashMap(); + final Map map = new HashMap<>(); if (!Double.isNaN(testResults.first)) map.put(getKeyNames().get(0), String.format("%.3f", testResults.first)); return map; } - protected abstract void fillQualsFromPileup(final List alleles, - final int refLoc, - final ReadBackedPileup readBackedPileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, - final List altQuals); + private void fillQualsFromPileup(final List alleles, + final ReadBackedPileup pileup, + final List refQuals, + final List altQuals) { + for ( final PileupElement p : pileup ) { + if ( isUsableBase(p) ) { + final Double value = getElementForPileupElement(p); + if ( value == null ) + continue; + + if ( alleles.get(0).equals(Allele.create(p.getBase(), true)) ) + refQuals.add(value); + else if ( alleles.contains(Allele.create(p.getBase())) ) + altQuals.add(value); + } + } + } + + private void fillQualsFromLikelihoodMap(final List alleles, + final int refLoc, + final PerReadAlleleLikelihoodMap likelihoodMap, + final 
List refQuals, + final List altQuals) { + for ( final Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet() ) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); + if ( ! a.isInformative() ) + continue; // read is non-informative + + final GATKSAMRecord read = el.getKey(); + if ( isUsableRead(read, refLoc) ) { + final Double value = getElementForRead(read, refLoc); + if ( value == null ) + continue; + + if ( a.getMostLikelyAllele().isReference() ) + refQuals.add(value); + else if ( alleles.contains(a.getMostLikelyAllele()) ) + altQuals.add(value); + } + } + } + + /** + * Get the element for the given read at the given reference position + * + * @param read the read + * @param refLoc the reference position + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected abstract Double getElementForRead(final GATKSAMRecord read, final int refLoc); + + // TODO -- until the ReadPosRankSumTest stops treating these differently, we need to have separate methods for GATKSAMRecords and PileupElements. Yuck. + + /** + * Get the element for the given read at the given reference position + * + * @param p the pileup element + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected abstract Double getElementForPileupElement(final PileupElement p); /** * Can the base in this pileup element be used in comparative tests between ref / alt bases? 
@@ -157,30 +215,33 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR * @param p the pileup element to consider * @return true if this base is part of a meaningful read for comparison, false otherwise */ - public static boolean isUsableBase(final PileupElement p) { - return isUsableBase(p, false); + protected boolean isUsableBase(final PileupElement p) { + return !(p.isDeletion() || + p.getMappingQual() == 0 || + p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE || // need the unBAQed quality score here + p.getRead().isReducedRead() ); } /** - * Can the base in this pileup element be used in comparative tests between ref / alt bases? + * Can the read be used in comparative tests between ref / alt bases? * - * @param p the pileup element to consider - * @param allowDeletions if true, allow p to be a deletion base - * @return true if this base is part of a meaningful read for comparison, false otherwise + * @param read the read to consider + * @param refLoc the reference location + * @return true if this read is meaningful for comparison, false otherwise */ - public static boolean isUsableBase(final PileupElement p, final boolean allowDeletions) { - return !((! allowDeletions && p.isDeletion()) || - p.getMappingQual() == 0 || - p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || - ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); // need the unBAQed quality score here + protected boolean isUsableRead(final GATKSAMRecord read, final int refLoc) { + return !( read.getMappingQuality() == 0 || + read.getMappingQuality() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + read.isReducedRead() ); } /** * Initialize the rank sum test annotation using walker and engine information. Right now this checks to see if * engine randomization is turned off, and if so does not dither. 
- * @param walker - * @param toolkit - * @param headerLines + * @param walker the walker + * @param toolkit the GATK engine + * @param headerLines the header lines */ public void initialize ( AnnotatorCompatible walker, GenomeAnalysisEngine toolkit, Set headerLines ) { useDithering = ! toolkit.getArguments().disableDithering; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java index 6ce4aab49..37faaed22 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java @@ -51,17 +51,13 @@ import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -83,55 +79,34 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return Arrays.asList(new VCFInfoHeaderLine("ReadPosRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. 
Ref read position bias")); } - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, final List altQuals) { + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); + if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) + return null; - if (alleleLikelihoodMap == null) { - // use old UG SNP-based version if we don't have per-read allele likelihoods - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) && p.getRead().getCigar() != null ) { - int readPos = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); - - readPos = getFinalReadPosition(p.getRead(),readPos); - - if ( allAlleles.get(0).equals(Allele.create(p.getBase(), true)) ) { - refQuals.add((double)readPos); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)readPos); - } - } - } - return; - } - - for (Map.Entry> el : alleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! 
a.isInformative() ) - continue; // read is non-informative - - final GATKSAMRecord read = el.getKey(); - if ( read.getSoftStart() + read.getCigar().getReadLength() <= refLoc ) { // make sure the read actually covers the requested ref loc - continue; - } - final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); - if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED || read.getCigar() == null ) - continue; - int readPos = AlignmentUtils.calcAlignmentByteArrayOffset( read.getCigar(), offset, false, 0, 0 ); - final int numAlignedBases = AlignmentUtils.getNumAlignedBasesCountingSoftClips( read ); - if (readPos > numAlignedBases / 2) - readPos = numAlignedBases - (readPos + 1); - - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)readPos); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)readPos); - } + int readPos = AlignmentUtils.calcAlignmentByteArrayOffset( read.getCigar(), offset, false, 0, 0 ); + final int numAlignedBases = AlignmentUtils.getNumAlignedBasesCountingSoftClips( read ); + if (readPos > numAlignedBases / 2) + readPos = numAlignedBases - (readPos + 1); + return (double)readPos; } - int getFinalReadPosition(GATKSAMRecord read, int initialReadPosition) { + protected Double getElementForPileupElement(final PileupElement p) { + final int offset = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); + return (double)getFinalReadPosition(p.getRead(), offset); + } + + @Override + protected boolean isUsableBase(final PileupElement p) { + return super.isUsableBase(p) && p.getRead().getCigar() != null; + } + + @Override + protected boolean isUsableRead(final GATKSAMRecord read, final int refLoc) { + return super.isUsableRead(read, refLoc) && read.getSoftStart() + read.getCigar().getReadLength() > refLoc; + } + + private int getFinalReadPosition(final GATKSAMRecord read, final int 
initialReadPosition) { final int numAlignedBases = getNumAlignedBases(read); int readPos = initialReadPosition; @@ -141,7 +116,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return readPos; } - int getNumClippedBasesAtStart(SAMRecord read) { + + private int getNumClippedBasesAtStart(final SAMRecord read) { // compute total number of clipped bases (soft or hard clipped) // check for hard clips (never consider these bases): final Cigar c = read.getCigar(); @@ -151,8 +127,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio if (first.getOperator() == CigarOperator.H) { numStartClippedBases = first.getLength(); } - byte[] unclippedReadBases = read.getReadBases(); - byte[] unclippedReadQuals = read.getBaseQualities(); + final byte[] unclippedReadBases = read.getReadBases(); + final byte[] unclippedReadQuals = read.getBaseQualities(); // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // and may leave a string of Q2 bases still hanging off the reads. 
@@ -167,11 +143,11 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return numStartClippedBases; } - int getNumAlignedBases(SAMRecord read) { + private int getNumAlignedBases(final GATKSAMRecord read) { return read.getReadLength() - getNumClippedBasesAtStart(read) - getNumClippedBasesAtEnd(read); } - int getNumClippedBasesAtEnd(SAMRecord read) { + private int getNumClippedBasesAtEnd(final GATKSAMRecord read) { // compute total number of clipped bases (soft or hard clipped) // check for hard clips (never consider these bases): final Cigar c = read.getCigar(); @@ -181,8 +157,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio if (last.getOperator() == CigarOperator.H) { numEndClippedBases = last.getLength(); } - byte[] unclippedReadBases = read.getReadBases(); - byte[] unclippedReadQuals = read.getBaseQualities(); + final byte[] unclippedReadBases = read.getReadBases(); + final byte[] unclippedReadQuals = read.getBaseQualities(); // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // and may leave a string of Q2 bases still hanging off the reads. 
@@ -193,11 +169,6 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio break; } - return numEndClippedBases; } - - int getOffsetFromClippedReadStart(SAMRecord read, int offset) { - return offset - getNumClippedBasesAtStart(read); - } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java new file mode 100644 index 000000000..fec83e1a8 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java @@ -0,0 +1,151 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.gatk.walkers.compression.reducereads.*; +import org.broadinstitute.sting.gatk.walkers.compression.reducereads.BaseCounts; +import org.broadinstitute.sting.utils.MannWhitneyU; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class RankSumUnitTest { + + List distribution20, distribution30, distribution20_40; + static final int observations = 100; + + @BeforeClass + public void init() { + distribution20 = new ArrayList<>(observations); + distribution30 = new ArrayList<>(observations); + distribution20_40 = new ArrayList<>(observations); + + final int skew = 3; + makeDistribution(distribution20, 20, skew, observations); + makeDistribution(distribution30, 30, skew, observations); + makeDistribution(distribution20_40, 20, skew, observations/2); + makeDistribution(distribution20_40, 40, skew, observations/2); + + // shuffle the observations + Collections.shuffle(distribution20); + Collections.shuffle(distribution30); + Collections.shuffle(distribution20_40); + } + + private static void makeDistribution(final List result, final int target, final int skew, final int numObservations) { + final int rangeStart = target - skew; + final int rangeEnd = target + skew; + + int current = rangeStart; + for ( int i = 0; i < numObservations; i++ ) { + result.add(current++); + if ( current > rangeEnd ) + current = rangeStart; + } + } + + @DataProvider(name = "DistributionData") + public Object[][] makeDistributionData() { + List tests = new ArrayList(); + + for ( final int numToReduce : Arrays.asList(0, 10, 50, 100) ) { + tests.add(new Object[]{distribution20, distribution20, numToReduce, true, "20-20"}); + tests.add(new Object[]{distribution30, distribution30, 
numToReduce, true, "30-30"}); + tests.add(new Object[]{distribution20_40, distribution20_40, numToReduce, true, "20/40-20/40"}); + + tests.add(new Object[]{distribution20, distribution30, numToReduce, false, "20-30"}); + tests.add(new Object[]{distribution30, distribution20, numToReduce, false, "30-20"}); + + tests.add(new Object[]{distribution20, distribution20_40, numToReduce, false, "20-20/40"}); + tests.add(new Object[]{distribution30, distribution20_40, numToReduce, true, "30-20/40"}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true, dataProvider = "DistributionData") + public void testDistribution(final List distribution1, final List distribution2, final int numToReduceIn2, final boolean distributionsShouldBeEqual, final String debugString) { + final MannWhitneyU mannWhitneyU = new MannWhitneyU(true); + + for ( final Integer num : distribution1 ) + mannWhitneyU.add(num, MannWhitneyU.USet.SET1); + + final List dist2 = new ArrayList<>(distribution2); + if ( numToReduceIn2 > 0 ) { + final org.broadinstitute.sting.gatk.walkers.compression.reducereads.BaseCounts counts = new BaseCounts(); + for ( int i = 0; i < numToReduceIn2; i++ ) { + final int value = dist2.remove(0); + counts.incr(BaseIndex.A, (byte)value, 0, false); + } + + final int qual = (int)counts.averageQualsOfBase(BaseIndex.A); + for ( int i = 0; i < numToReduceIn2; i++ ) + dist2.add(qual); + } + + for ( final Integer num : dist2 ) + mannWhitneyU.add(num, MannWhitneyU.USet.SET2); + + final Double result = mannWhitneyU.runTwoSidedTest().second; + Assert.assertFalse(Double.isNaN(result)); + + if ( distributionsShouldBeEqual ) { + // TODO -- THIS IS THE FAILURE POINT OF USING REDUCED READS WITH RANK SUM TESTS + if ( numToReduceIn2 >= observations / 2 ) + return; + Assert.assertTrue(result > 0.1, String.format("%f %d %d", result, numToReduceIn2, dist2.get(0))); + } else { + Assert.assertTrue(result < 0.01, String.format("%f %d %d", result, numToReduceIn2, dist2.get(0))); + } + 
} +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java index 961a28bcf..e7d7300ae 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java @@ -78,7 +78,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testHasAnnotsAsking1() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample2.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("42889072698af972f2004ccfe8eae15e")); + Arrays.asList("823868a4b5b5ec2cdf080c059d04d31a")); executeTest("test file has annotations, asking for annotations, #1", spec); } @@ -112,7 +112,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testNoAnnotsAsking1() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample2empty.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("7e755bb09169699b76850e76b71a5f5a")); + Arrays.asList("6f873b3152db291e18e3a04fbce2e117")); executeTest("test file doesn't have annotations, asking for annotations, #1", spec); } @@ -128,7 +128,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testExcludeAnnotations() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard -XA FisherStrand -XA ReadPosRankSumTest --variant " + privateTestDir + "vcfexample2empty.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - 
Arrays.asList("e17596007d0db7673d138a9ae4890e82")); + Arrays.asList("552c2ad9dbfaa85d51d2def93c8229c6")); executeTest("test exclude annotations", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java index c791d08ae..2d36a27d1 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java @@ -79,6 +79,6 @@ public class UnifiedGenotyperGeneralPloidySuite1IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy1_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "353c97bfb05a939b3838dc8eee50326b"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "dd28b14d732852bffbba4f22f7697227"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java index 1022b6e15..117e54ef8 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java @@ -58,7 +58,7 @@ public class UnifiedGenotyperGeneralPloidySuite2IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy3_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 
3","LSV_INDEL_DISC_NOREF_p3","INDEL","7e4e1397d5cff68aeba3595e671574fc"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","369ad0ff28bb9ce7974dc2c7343c8737"); } @Test(enabled = true) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java index 64a27c4c3..49d429c0d 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java @@ -73,7 +73,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("14ad6eeed46e9b6f4757370267b1a1cc")); + Arrays.asList("ef8151aa699da3272c1ae0986d16ca21")); executeTest(String.format("test indel caller in SLX"), spec); } @@ -88,7 +88,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -minIndelCnt 1" + " -L 1:10,000,000-10,100,000", 1, - Arrays.asList("d9572a227ccb13a6baa6dc4fb65bc1e5")); + Arrays.asList("7f88229ccefb74513efb199b61183cb8")); executeTest(String.format("test indel caller in SLX with low min allele count"), spec); } @@ -101,7 +101,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("cd184a2a5a1932dcf3e8f0424652176b")); + Arrays.asList("1928ad48bcd0ca180e046bc235cfb3f4")); executeTest(String.format("test indel calling, multiple technologies"), spec); } @@ -111,7 +111,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + 
privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("e8d98996eb81ece8cfb52437920ae2e0")); + Arrays.asList("6663e434a7b549f23bfd52db90e53a1a")); executeTest("test MultiSample Pilot2 indels with alleles passed in", spec); } @@ -121,7 +121,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { baseCommandIndels + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("23a78c16f64bffe1dea3a5587fcabdad")); + Arrays.asList("581c552664e536df6d0f102fb0d10e5a")); executeTest("test MultiSample Pilot2 indels with alleles passed in and emitting all sites", spec); } @@ -136,7 +136,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L " + result.get(0).getAbsolutePath(), 1, - Arrays.asList("facac578891a4f2be63ddd5ba6b9096b")); + Arrays.asList("587bf6bad368ed81189747a84f913ab2")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } @@ -176,7 +176,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { public void testMinIndelFraction0() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.0", 1, - Arrays.asList("e90256acfc360fc4bf377094732a673a")); + Arrays.asList("862d82c8aa35f1da4f9e67b5b48dfe52")); executeTest("test minIndelFraction 0.0", spec); } @@ -184,7 +184,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { public void 
testMinIndelFraction25() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.25", 1, - Arrays.asList("98abcfb0a008050eba8b9c285a25b2a0")); + Arrays.asList("8d9fc96be07db791737ac18135de4d63")); executeTest("test minIndelFraction 0.25", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index f7c5e6fd5..439039f9b 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -64,7 +64,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultiSamplePilot1() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommand + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10,022,000-10,025,000", 1, - Arrays.asList("474dfb943a307c86cabe2043970c58f3")); + Arrays.asList("a9466c1e3ce1fc4bac83086b25a6df54")); executeTest("test MultiSample Pilot1", spec); } @@ -96,7 +96,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultipleSNPAlleles() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1, - Arrays.asList("f576d86656cc37c0a869c7ac911f4c7c")); + Arrays.asList("70a21812d4dd6b72c44f60c74d508d5b")); executeTest("test Multiple SNP alleles", spec); } @@ -112,7 +112,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends 
WalkerTest{ public void testReverseTrim() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam -o %s -L 20:10289124 -L 20:10090289", 1, - Arrays.asList("94d7a907fdca7e8c9fd6bb8a87b2bab2")); + Arrays.asList("f3da1ff1e49a831af055ca52d6d07dd7")); executeTest("test reverse trim", spec); } @@ -120,7 +120,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMismatchedPLs() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + privateTestDir + "mismatchedPLs.bam -o %s -L 1:24020341", 1, - Arrays.asList("94bfccbd06043e90ae1b1c66fc3afe07")); + Arrays.asList("20ff311f363c51b7385a76f6f296759c")); executeTest("test mismatched PLs", spec); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java index b9830de8e..33810e255 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java @@ -63,18 +63,18 @@ public class UnifiedGenotyperReducedReadsIntegrationTest extends WalkerTest { public void testReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("e6565060b44a7804935973efcd56e596")); + 
Arrays.asList("ffde0d5e23523e4bd9e7e18f62d37d0f")); executeTest("test calling on a ReducedRead BAM", spec); } @Test public void testReducedBamSNPs() { - testReducedCalling("SNP", "ab776d74c41ce2b859e2b2466a76204a"); + testReducedCalling("SNP", "e8de8c523751ad2fa2ee20185ba5dea7"); } @Test public void testReducedBamINDELs() { - testReducedCalling("INDEL", "22110b001e2d3dd45d7872334086b2b9"); + testReducedCalling("INDEL", "4b4902327fb132f9aaab3dd5ace934e1"); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index c1b8f8a70..0636d7c1b 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "03944bbedb012e2ac2026a84baa0560c"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "4a3479fc4ad387d381593b328f737a1b"); } private void HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "321dc9f3d330790bac7981ffae00cb0c"); + "b7a01525c00d02b3373513a668a43c6a"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - 
"7e9f99d4cba8087dac66ea871b910d7e"); + "a2a42055b068334f415efb07d6bb9acd"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index da92f39fc..aca1172d4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -78,12 +78,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "09d84bc1aef2dd9c185934752172b794"); + HCTest(CEUTRIO_BAM, "", "baa5a2eedc8f06ce9f8f98411ee09f8a"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "5c074930b27d1f5c942fe755c2a8be27"); + HCTest(NA12878_BAM, "", "f09e03d41238697b23f95716a12667cb"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -94,7 +94,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "005a6d1933913a5d96fc56d01303fa95"); + "130d36448faeb7b8d4bce4be12dacd3a"); } @Test @@ -110,7 +110,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "9b6f667ad87e19c38d16fefe63c37484"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "7c20aa62633f4ce8ebf12950fbf05ec0"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -147,7 +147,7 @@ public class HaplotypeCallerIntegrationTest extends 
WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "6e170d03047caefc2fba3f1c1f8de132"); + HCTestNearbySmallIntervals(NA12878_BAM, "", "0ddc56f0a0fbcfefda79aa20b2ecf603"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -186,7 +186,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("a47ef09a8701128cfb301a83b7bb0728")); + Arrays.asList("5fe9310addf881bed4fde2354e59ce34")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -194,7 +194,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("0cb99f6bb3e630add4b3486c496fa508")); + Arrays.asList("26a9917f6707536636451266de0116c3")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } @@ -208,7 +208,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestDBSNPAnnotationWGS() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, - Arrays.asList("92f947cc89e4f50cf2ef3121d2fe308d")); + Arrays.asList("cc6f2a76ee97ecc14a5f956ffbb21d88")); executeTest("HC calling with dbSNP ID annotation 
on WGS intervals", spec); } @@ -217,7 +217,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-11,000,000 -D " + b37dbSNP132 + " -L " + hg19Intervals + " -isr INTERSECTION", 1, - Arrays.asList("91877c8ea3eb0e0316d9ad11fdcc1a87")); + Arrays.asList("51e91c8af61a6b47807165906baefb00")); executeTest("HC calling with dbSNP ID annotation on WEx intervals", spec); } } From b69d210255324d80cfca3986849a716492c76d1e Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 17 Jun 2013 10:50:07 -0400 Subject: [PATCH 68/99] Bugfix: allow gzip VCF output in multi-threaded GATK output -- VariantContextWriterStorage was gzipping the intermediate files that would be merged in, but the mergeInto function couldn't read those outputs, and we'd throw a very strange error. Now tmp. VCFs aren't compressed, even if the final VCF is. Added integrationtest to ensure this behavior works going forward. 
-- [delivers #47399279] --- .../storage/VariantContextWriterStorage.java | 24 +++++++++++++++---- .../gatk/EngineFeaturesIntegrationTest.java | 15 ++++++++++++ 2 files changed, 35 insertions(+), 4 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java b/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java index 84709d6d8..80841bae7 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java +++ b/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java @@ -67,12 +67,16 @@ public class VariantContextWriterStorage implements Storage 0); + } } \ No newline at end of file From f6025d25aeabd7c52ef89c6202438f0a40199ee1 Mon Sep 17 00:00:00 2001 From: Guillermo del Angel Date: Thu, 16 May 2013 10:04:11 -0400 Subject: [PATCH 69/99] Feature requested by Reich lab and Paavo lab in Leipzig for ancient DNA processing: -- When doing cross-species comparisons and studying population history and ancient DNA data, having SOME measure of confidence is needed at every single site that doesn't depend on the reference base, even in a naive per-site SNP mode. Old versions of GATK provided GQ and some wrong PL values at reference sites but these were wrong. This commit addresses this need by adding a new UG command line argument, -allSitePLs, that, if enabled will: a) Emit all 3 ALT snp alleles in the ALT column. b) Emit all corresponding 10 PL values. It's up to the user to process these PL values downstream to make sense of these. Note that, in order to follow VCF spec, the QUAL field in a reference call when there are non-null ALT alleles present will be zero, so QUAL will be useless and filtering will need to be done based on other fields. -- Tweaks and fixes to processing pipelines for Reich lab. 
--- .../SNPGenotypeLikelihoodsCalculationModel.java | 12 ++++++++++-- .../genotyper/UnifiedArgumentCollection.java | 17 ++++++++++++++++- .../genotyper/UnifiedGenotyperEngine.java | 12 ++++++++++-- .../UnifiedGenotyperIntegrationTest.java | 8 ++++++++ .../picard/CollectGcBiasMetrics.scala | 3 +-- 5 files changed, 45 insertions(+), 7 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java index ce5f94478..360f88e51 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java @@ -147,9 +147,17 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC // if we only want variants, then we don't need to calculate genotype likelihoods if ( UAC.OutputMode == UnifiedGenotyperEngine.OUTPUT_MODE.EMIT_VARIANTS_ONLY ) return builder.make(); + // if user requires all PLs at all sites, add all possible alt alleles + else if (UAC.annotateAllSitesWithPLs) { + for ( final byte base : BaseUtils.BASES ) { + if ( base != refBase ) + alleles.add(Allele.create(base)); + } + } - // otherwise, choose any alternate allele (it doesn't really matter) - alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 1 : 0))); + else + // otherwise, choose any alternate allele (it doesn't really matter) + alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 
1 : 0))); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java index e346b10b7..b96b5733f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java @@ -52,6 +52,9 @@ import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.VariantContext; +import java.util.Collections; +import java.util.List; + public class UnifiedArgumentCollection extends StandardCallerArgumentCollection { @Argument(fullName = "genotype_likelihoods_model", shortName = "glm", doc = "Genotype likelihoods calculation model to employ -- SNP is the default option, while INDEL is also available for calling indels and BOTH is available for calling both together", required = false) @@ -95,6 +98,18 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection @Argument(fullName = "max_deletion_fraction", shortName = "deletions", doc = "Maximum fraction of reads with deletions spanning this locus for it to be callable [to disable, set to < 0 or > 1; default:0.05]", required = false) public Double MAX_DELETION_FRACTION = 0.05; + /** + * Advanced, experimental argument: if SNP likelihood model is specified, and if EMIT_ALL_SITES output mode is set, when we set this argument then we will also emit PLs at all sites. + * This will give a measure of reference confidence and a measure of which alt alleles are more plausible (if any). + * WARNINGS: + * - This feature will inflate VCF file size considerably. + * - All SNP ALT alleles will be emitted with corresponding 10 PL values. 
+ * - An error will be emitted if EMIT_ALL_SITES is not set, or if anything other than diploid SNP model is used + */ + @Advanced + @Argument(fullName = "allSitePLs", shortName = "allSitePLs", doc = "Annotate all sites with PLs", required = false) + public boolean annotateAllSitesWithPLs = false; + // indel-related arguments /** * A candidate indel is genotyped (and potentially called) if there are this number of reads with a consensus indel at a site. @@ -247,7 +262,7 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection this.EXCLUDE_FILTERED_REFERENCE_SITES = uac.EXCLUDE_FILTERED_REFERENCE_SITES; this.IGNORE_LANE_INFO = uac.IGNORE_LANE_INFO; this.pairHMM = uac.pairHMM; - + this.annotateAllSitesWithPLs = uac.annotateAllSitesWithPLs; // todo- arguments to remove this.IGNORE_SNP_ALLELES = uac.IGNORE_SNP_ALLELES; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java index 3d9f75d45..9f3368cf8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java @@ -168,6 +168,13 @@ public class UnifiedGenotyperEngine { filter.add(LOW_QUAL_FILTER_NAME); determineGLModelsToUse(); + + // do argument checking + if (UAC.annotateAllSitesWithPLs) { + if (!modelsToUse.contains(GenotypeLikelihoodsCalculationModel.Model.SNP)) + throw new IllegalArgumentException("Invalid genotype likelihood model specification: Only diploid SNP model can be used in conjunction with option allSitePLs"); + + } } /** @@ -439,7 +446,8 @@ public class UnifiedGenotyperEngine { bestGuessIsRef = false; } // if in GENOTYPE_GIVEN_ALLELES mode, we still want to allow the use of a poor allele - else if ( UAC.GenotypingMode == 
GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { + else if ( UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || + UAC.annotateAllSitesWithPLs) { myAlleles.add(alternateAllele); alleleCountsofMLE.add(AFresult.getAlleleCountAtMLE(alternateAllele)); } @@ -449,7 +457,7 @@ public class UnifiedGenotyperEngine { // note the math.abs is necessary because -10 * 0.0 => -0.0 which isn't nice final double phredScaledConfidence = - Math.abs(! bestGuessIsRef || UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES + Math.abs(! bestGuessIsRef || UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || UAC.annotateAllSitesWithPLs ? -10 * AFresult.getLog10PosteriorOfAFEq0() : -10 * AFresult.getLog10PosteriorOfAFGT0()); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java index 300d7f5da..3eb9b4e1c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java @@ -156,6 +156,14 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { } + @Test + public void emitPLsAtAllSites() { + WalkerTest.WalkerTestSpec spec1 = new WalkerTest.WalkerTestSpec( + baseCommand + " -I " + validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -o %s -L 1:10,000,000-10,010,000 --output_mode EMIT_ALL_SITES -allSitePLs", 1, + Arrays.asList("7cc55db8693759e059a05bc4398f6f69")); + executeTest("test all site PLs 1", spec1); + + } // -------------------------------------------------------------------------------------------------------------- // // testing heterozygosity diff --git 
a/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala b/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala index 5d887016e..7c4c3f26a 100644 --- a/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala +++ b/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala @@ -52,6 +52,5 @@ class CollectGcBiasMetrics extends org.broadinstitute.sting.queue.function.JavaC override def commandLine = super.commandLine + required("SUMMARY_OUTPUT=" + output) + required("CHART_OUTPUT=" + output+".pdf") + - required("REFERENCE_SEQUENCE=" + reference) + - required("ASSUME_SORTED=true") + required("REFERENCE_SEQUENCE=" + reference) } From 7b22467148e7a8851323402d93b9e357a5b11fc8 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 17 Jun 2013 13:35:04 -0400 Subject: [PATCH 70/99] Bugfix: defaultBaseQualities actually works now -- It was being applied in the wrong order (after the first call to the underlying MalformedReadFilter) so if your first read was malformed you'd blow up there instead of being fixed properly. Added integration tests to ensure this continues to work. 
-- [delivers #49538319] --- .../arguments/GATKArgumentCollection.java | 12 ++++---- .../gatk/datasources/reads/SAMDataSource.java | 8 +++--- .../gatk/filters/MalformedReadFilter.java | 5 +++- .../gatk/EngineFeaturesIntegrationTest.java | 28 +++++++++++++++++++ 4 files changed, 42 insertions(+), 11 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java index 0b1f341f0..b5113fdea 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java @@ -187,6 +187,12 @@ public class GATKArgumentCollection { @Argument(fullName = "allow_potentially_misencoded_quality_scores", shortName="allowPotentiallyMisencodedQuals", doc="Do not fail when encountering base qualities that are too high and that seemingly indicate a problem with the base quality encoding of the BAM file", required = false) public boolean ALLOW_POTENTIALLY_MISENCODED_QUALS = false; + @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "If set, use the original base quality scores from the OQ tag when present instead of the standard scores", required=false) + public Boolean useOriginalBaseQualities = false; + + @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "If reads are missing some or all base quality scores, this value will be used for all base quality scores", required=false) + public byte defaultBaseQualities = -1; + // -------------------------------------------------------------------------------------------------------------- // // performance log arguments @@ -201,9 +207,6 @@ public class GATKArgumentCollection { @Argument(fullName = "performanceLog", shortName="PF", doc="If provided, a GATK runtime performance log will be written to this file", required = false) public File performanceLog = null; - 
@Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "If set, use the original base quality scores from the OQ tag when present instead of the standard scores", required=false) - public Boolean useOriginalBaseQualities = false; - // -------------------------------------------------------------------------------------------------------------- // // BQSR arguments @@ -267,9 +270,6 @@ public class GATKArgumentCollection { // // -------------------------------------------------------------------------------------------------------------- - @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "If reads are missing some or all base quality scores, this value will be used for all base quality scores", required=false) - public byte defaultBaseQualities = -1; - @Argument(fullName = "validation_strictness", shortName = "S", doc = "How strict should we be with validation", required = false) public SAMFileReader.ValidationStringency strictnessLevel = SAMFileReader.ValidationStringency.SILENT; diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java index bf25582ab..2f934e8df 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java @@ -630,6 +630,10 @@ public class SAMDataSource { // * (otherwise we will process something that we may end up throwing away) * // // ************************************************************************************************ // + if (useOriginalBaseQualities || defaultBaseQualities >= 0) + // only wrap if we are replacing the original qualities or using a default base quality + wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); + // Filters: wrappedIterator = StingSAMIteratorAdapter.adapt(new 
CountingFilteringIterator(readMetrics,wrappedIterator,supplementalFilters)); @@ -654,10 +658,6 @@ public class SAMDataSource { if (!noValidationOfReadOrder && enableVerification) wrappedIterator = new VerifyingSamIterator(wrappedIterator); - if (useOriginalBaseQualities || defaultBaseQualities >= 0) - // only wrap if we are replacing the original qualities or using a default base quality - wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - // set up read transformers for ( final ReadTransformer readTransformer : readTransformers ) { if ( readTransformer.enabled() && readTransformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_INPUT ) diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java index a15870a22..3167ba139 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java @@ -234,7 +234,10 @@ public class MalformedReadFilter extends ReadFilter { else if (filterMismatchingBaseAndQuals) result = false; else - throw new UserException.MalformedBAM(read, String.format("BAM file has a read with mismatching number of bases and base qualities. Offender: %s [%d bases] [%d quals]", read.getReadName(), read.getReadLength(), read.getBaseQualities().length)); + throw new UserException.MalformedBAM(read, + String.format("BAM file has a read with mismatching number of bases and base qualities. Offender: %s [%d bases] [%d quals].%s", + read.getReadName(), read.getReadLength(), read.getBaseQualities().length, + read.getBaseQualities().length == 0 ? " You can use --defaultBaseQualities to assign a default base quality for all reads, but this can be dangerous in you don't know what you are doing." 
: "")); return result; } diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index 736989418..fe30b60fd 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -227,4 +227,32 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { nLines++; Assert.assertTrue(nLines > 0); } + + // -------------------------------------------------------------------------------- + // + // Test that defaultBaseQualities actually works + // + // -------------------------------------------------------------------------------- + + public WalkerTestSpec testDefaultBaseQualities(final Integer value, final String md5) { + return new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I " + privateTestDir + "/baseQualitiesToFix.bam -o %s" + + (value != null ? 
" --defaultBaseQualities " + value : ""), + 1, Arrays.asList(md5)); + } + + @Test() + public void testDefaultBaseQualities20() { + executeTest("testDefaultBaseQualities20", testDefaultBaseQualities(20, "7d254a9d0ec59c66ee3e137f56f4c78f")); + } + + @Test() + public void testDefaultBaseQualities30() { + executeTest("testDefaultBaseQualities30", testDefaultBaseQualities(30, "0f50def6cbbbd8ccd4739e2b3998e503")); + } + + @Test(expectedExceptions = Exception.class) + public void testDefaultBaseQualitiesNoneProvided() { + executeTest("testDefaultBaseQualitiesNoneProvided", testDefaultBaseQualities(null, "")); + } + } \ No newline at end of file From cb5b1c3c343bc3a02764746f66d91ff7bb2e9975 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 17 Jun 2013 16:03:45 -0300 Subject: [PATCH 71/99] Create README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 000000000..13b3c0c6e --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +gsa-unstable +============ +See http://www.broadinstitute.org/gatk/ From 8511c4385c3b47d0568d6fe404086ddcae5cd8ee Mon Sep 17 00:00:00 2001 From: Ryan Poplin Date: Mon, 17 Jun 2013 14:02:54 -0400 Subject: [PATCH 72/99] Adding new pruning parameter to ReadThreadingAssembler -- numPruningSamples allows one to specify that the minPruning factor must be met by this many samples for a path to be considered good (e.g. seen twice in three samples). By default this is just one sample. 
-- adding unit test to test this new functionality --- .../haplotypecaller/HaplotypeCaller.java | 8 ++-- .../graphs/MultiSampleEdge.java | 39 ++++++++++-------- .../readthreading/ReadThreadingAssembler.java | 8 ++-- .../readthreading/ReadThreadingGraph.java | 33 ++++++++++----- .../graphs/MultiSampleEdgeUnitTest.java | 40 ++++++++++++------- 5 files changed, 80 insertions(+), 48 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index b94b74748..9b9c3924b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -80,8 +80,6 @@ import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; -import org.broadinstitute.sting.utils.fragments.FragmentCollection; -import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.haplotype.*; import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; @@ -270,6 +268,10 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="dontIncreaseKmerSizesForCycles", shortName="dontIncreaseKmerSizesForCycles", doc="Should we disable the iterating over kmer sizes when graph cycles are detected?", required = false) protected boolean dontIncreaseKmerSizesForCycles = false; + @Advanced + @Argument(fullName="numPruningSamples", shortName="numPruningSamples", doc="The number of samples that must pass the minPuning factor in order for the path to be kept", required = 
false) + protected int numPruningSamples = 1; + /** * Assembly graph can be quite complex, and could imply a very large number of possible haplotypes. Each haplotype * considered requires N PairHMM evaluations if there are N reads across all samples. In order to control the @@ -539,7 +541,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final int maxAllowedPathsForReadThreadingAssembler = Math.max(maxPathsPerSample * nSamples, MIN_PATHS_PER_GRAPH); assemblyEngine = useDebruijnAssembler ? new DeBruijnAssembler(minKmerForDebruijnAssembler, onlyUseKmerSizeForDebruijnAssembler) - : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes, dontIncreaseKmerSizesForCycles); + : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes, dontIncreaseKmerSizesForCycles, numPruningSamples); assemblyEngine.setErrorCorrectKmers(errorCorrectKmers); assemblyEngine.setPruneFactor(MIN_PRUNE_FACTOR); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java index c1937e5c8..978d83eb4 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java @@ -46,6 +46,8 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; +import java.util.PriorityQueue; + /** * edge class for connecting nodes in the graph that tracks some per-sample information * @@ -63,32 +65,43 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; * e.getPruningMultiplicity() // = 3 */ public class MultiSampleEdge extends BaseEdge { - private int maxSingleSampleMultiplicity, currentSingleSampleMultiplicity; + private int currentSingleSampleMultiplicity; + private final int singleSampleCapacity; + private final 
PriorityQueue singleSampleMultiplicities; /** * Create a new MultiSampleEdge with weight multiplicity and, if isRef == true, indicates a path through the reference * * @param isRef indicates whether this edge is a path through the reference * @param multiplicity the number of observations of this edge in this sample + * @param singleSampleCapacity the max number of samples to track edge multiplicities */ - public MultiSampleEdge(final boolean isRef, final int multiplicity) { + public MultiSampleEdge(final boolean isRef, final int multiplicity, final int singleSampleCapacity) { super(isRef, multiplicity); - maxSingleSampleMultiplicity = multiplicity; + + if( singleSampleCapacity <= 0 ) { throw new IllegalArgumentException("singleSampleCapacity must be > 0 but found: " + singleSampleCapacity); } + singleSampleMultiplicities = new PriorityQueue<>(singleSampleCapacity); + singleSampleMultiplicities.add(multiplicity); currentSingleSampleMultiplicity = multiplicity; + this.singleSampleCapacity = singleSampleCapacity; } @Override public MultiSampleEdge copy() { - return new MultiSampleEdge(isRef(), getMultiplicity()); // TODO -- should I copy values for other features? + return new MultiSampleEdge(isRef(), getMultiplicity(), singleSampleCapacity); // TODO -- should I copy values for other features? } /** - * update the max single sample multiplicity based on the current single sample multiplicity, and + * update the single sample multiplicities by adding the current single sample multiplicity to the priority queue, and * reset the current single sample multiplicity to 0. 
*/ public void flushSingleSampleMultiplicity() { - if ( currentSingleSampleMultiplicity > maxSingleSampleMultiplicity ) - maxSingleSampleMultiplicity = currentSingleSampleMultiplicity; + singleSampleMultiplicities.add(currentSingleSampleMultiplicity); + if( singleSampleMultiplicities.size() == singleSampleCapacity + 1 ) { + singleSampleMultiplicities.poll(); // remove the lowest multiplicity from the list + } else if( singleSampleMultiplicities.size() > singleSampleCapacity + 1 ) { + throw new IllegalStateException("Somehow the per sample multiplicity list has grown too big: " + singleSampleMultiplicities); + } currentSingleSampleMultiplicity = 0; } @@ -100,20 +113,12 @@ public class MultiSampleEdge extends BaseEdge { @Override public int getPruningMultiplicity() { - return getMaxSingleSampleMultiplicity(); + return singleSampleMultiplicities.peek(); } @Override public String getDotLabel() { - return super.getDotLabel() + "/" + getMaxSingleSampleMultiplicity(); - } - - /** - * Get the maximum multiplicity for this edge seen in any single sample - * @return an integer >= 0 - */ - public int getMaxSingleSampleMultiplicity() { - return maxSingleSampleMultiplicity; + return super.getDotLabel() + "/" + getPruningMultiplicity(); } /** only provided for testing purposes */ diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index fc0f781c5..672c61c0f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -71,6 +71,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { private final int maxAllowedPathsForReadThreadingAssembler; private final boolean 
dontIncreaseKmerSizesForCycles; + private final int numPruningSamples; private boolean requireReasonableNumberOfPaths = false; protected boolean removePathsNotConnectedToRef = true; private boolean justReturnRawGraph = false; @@ -80,15 +81,16 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { this(DEFAULT_NUM_PATHS_PER_GRAPH, Arrays.asList(25)); } - public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes, final boolean dontIncreaseKmerSizesForCycles) { + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes, final boolean dontIncreaseKmerSizesForCycles, final int numPruningSamples) { super(maxAllowedPathsForReadThreadingAssembler); this.kmerSizes = kmerSizes; this.maxAllowedPathsForReadThreadingAssembler = maxAllowedPathsForReadThreadingAssembler; this.dontIncreaseKmerSizesForCycles = dontIncreaseKmerSizesForCycles; + this.numPruningSamples = numPruningSamples; } public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { - this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, true); + this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, true, 1); } /** for testing purposes */ @@ -139,7 +141,7 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { final int kmerSize, final List activeAlleleHaplotypes, final boolean allowLowComplexityGraphs) { - final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly); + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly, numPruningSamples); // add the reference sequence to the graph rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index 0844f979b..7d7df2c06 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -67,13 +67,24 @@ import java.util.*; public class ReadThreadingGraph extends BaseGraph { /** - * Edge factory that creates non-reference multiplicity 1 edges + * Edge factory that encapsulates the numPruningSamples assembly parameter */ private static class MyEdgeFactory implements EdgeFactory { - @Override - public MultiSampleEdge createEdge(MultiDeBruijnVertex sourceVertex, MultiDeBruijnVertex targetVertex) { - return new MultiSampleEdge(false, 1); + final int numPruningSamples; + + public MyEdgeFactory(int numPruningSamples) { + this.numPruningSamples = numPruningSamples; } + + @Override + public MultiSampleEdge createEdge(final MultiDeBruijnVertex sourceVertex, final MultiDeBruijnVertex targetVertex) { + return new MultiSampleEdge(false, 1, numPruningSamples); + } + + public MultiSampleEdge createEdge(final boolean isRef, final int multiplicity) { + return new MultiSampleEdge(isRef, multiplicity, numPruningSamples); + } + } private final static Logger logger = Logger.getLogger(ReadThreadingGraph.class); @@ -88,7 +99,7 @@ public class ReadThreadingGraph extends BaseGraph> pending = new LinkedHashMap>(); + private final Map> pending = new LinkedHashMap<>(); /** * A set of non-unique kmers that cannot be used as merge points in the graph @@ -117,19 +128,19 @@ public class ReadThreadingGraph extends BaseGraph= 1 */ - protected ReadThreadingGraph(final int kmerSize, final boolean debugGraphTransformations, final byte minBaseQualityToUseInAssembly) { - super(kmerSize, new MyEdgeFactory()); + protected ReadThreadingGraph(final int kmerSize, final boolean debugGraphTransformations, final byte 
minBaseQualityToUseInAssembly, final int numPruningSamples) { + super(kmerSize, new MyEdgeFactory(numPruningSamples)); if ( kmerSize < 1 ) throw new IllegalArgumentException("bad minkKmerSize " + kmerSize); this.kmerSize = kmerSize; @@ -324,7 +335,7 @@ public class ReadThreadingGraph extends BaseGraph countsPerSample; + final int numSamplesPruning; + public MultiplicityTestProvider(final List countsPerSample, final int numSamplesPruning) { + this.countsPerSample = countsPerSample; + this.numSamplesPruning = numSamplesPruning; + } + } + @DataProvider(name = "MultiplicityData") public Object[][] makeMultiplicityData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final List countsPerSample = Arrays.asList(0, 1, 2, 3, 4, 5); - for ( final int nSamples : Arrays.asList(1, 2, 3, 4, 5)) { - for ( final List perm : Utils.makePermutations(countsPerSample, nSamples, false) ) { - tests.add(new Object[]{perm}); + for ( final int numSamplesPruning : Arrays.asList(1, 2, 3) ) { + for ( final int nSamples : Arrays.asList(1, 2, 3, 4, 5)) { + for ( final List perm : Utils.makePermutations(countsPerSample, nSamples, false) ) { + tests.add(new Object[]{new MultiplicityTestProvider(perm, numSamplesPruning)}); + } } } @@ -77,15 +87,15 @@ public class MultiSampleEdgeUnitTest extends BaseTest { * Example testng test using MyDataProvider */ @Test(dataProvider = "MultiplicityData") - public void testMultiplicity(final List countsPerSample) { - final MultiSampleEdge edge = new MultiSampleEdge(false, 0); + public void testMultiplicity(final MultiplicityTestProvider cfg) { + final MultiSampleEdge edge = new MultiSampleEdge(false, 0, cfg.numSamplesPruning); Assert.assertEquals(edge.getMultiplicity(), 0); Assert.assertEquals(edge.getPruningMultiplicity(), 0); int total = 0; - for ( int i = 0; i < countsPerSample.size(); i++ ) { + for ( int i = 0; i < cfg.countsPerSample.size(); i++ ) { int countForSample = 0; - for ( int count = 0; count < countsPerSample.get(i); count++ ) 
{ + for ( int count = 0; count < cfg.countsPerSample.get(i); count++ ) { edge.incMultiplicity(1); total++; countForSample++; @@ -95,9 +105,11 @@ public class MultiSampleEdgeUnitTest extends BaseTest { edge.flushSingleSampleMultiplicity(); } - final int max = MathUtils.arrayMax(ArrayUtils.toPrimitive(countsPerSample.toArray(new Integer[countsPerSample.size()]))); + ArrayList counts = new ArrayList<>(cfg.countsPerSample); + counts.add(0); + Collections.sort(counts); + final int prune = counts.get(Math.max(counts.size() - cfg.numSamplesPruning, 0)); Assert.assertEquals(edge.getMultiplicity(), total); - Assert.assertEquals(edge.getPruningMultiplicity(), max); - Assert.assertEquals(edge.getMaxSingleSampleMultiplicity(), max); + Assert.assertEquals(edge.getPruningMultiplicity(), prune); } } From f176c854c684dd2412b59c0767d344e62918d0be Mon Sep 17 00:00:00 2001 From: Guillermo del Angel Date: Thu, 13 Jun 2013 13:27:06 -0400 Subject: [PATCH 73/99] Swapping in logless Pair HMM for default usage with UG: -- Changed default HMM model. -- Removed check. -- Changed md5's: PL's in the high 100s change by a point or two due to new implementation. -- Resulting performance improvement is about 30 to 50% less runtime when using -glm INDEL. 
--- .../IndelGenotypeLikelihoodsCalculationModel.java | 12 +++++++----- .../walkers/genotyper/UnifiedArgumentCollection.java | 2 +- .../gatk/walkers/indels/PairHMMIndelErrorModel.java | 10 +++++----- ...dGenotyperGeneralPloidySuite1IntegrationTest.java | 2 +- ...dGenotyperGeneralPloidySuite2IntegrationTest.java | 2 +- .../UnifiedGenotyperIndelCallingIntegrationTest.java | 2 +- ...UnifiedGenotyperNormalCallingIntegrationTest.java | 2 +- 7 files changed, 17 insertions(+), 15 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java index c6e9ea379..0f3f7739d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java @@ -76,7 +76,8 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood private List alleleList = new ArrayList(); - protected IndelGenotypeLikelihoodsCalculationModel(UnifiedArgumentCollection UAC, Logger logger) { + protected IndelGenotypeLikelihoodsCalculationModel(final UnifiedArgumentCollection UAC, + final Logger logger) { super(UAC, logger); pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY, UAC.INDEL_GAP_CONTINUATION_PENALTY, UAC.OUTPUT_DEBUG_INDEL_INFO, UAC.pairHMM); @@ -85,10 +86,11 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood ignoreSNPAllelesWhenGenotypingIndels = UAC.IGNORE_SNP_ALLELES; } - protected static List computeConsensusAlleles(ReferenceContext ref, - Map contexts, - AlignmentContextUtils.ReadOrientation contextType, - GenomeLocParser locParser, UnifiedArgumentCollection UAC) { + protected static List computeConsensusAlleles(final ReferenceContext ref, + final Map contexts, + 
final AlignmentContextUtils.ReadOrientation contextType, + final GenomeLocParser locParser, + final UnifiedArgumentCollection UAC) { ConsensusAlleleCounter counter = new ConsensusAlleleCounter(locParser, true, UAC.MIN_INDEL_COUNT_FOR_GENOTYPING, UAC.MIN_INDEL_FRACTION_PER_SAMPLE); return counter.computeConsensusAlleles(ref, contexts, contextType); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java index b96b5733f..f156468cc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java @@ -85,7 +85,7 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection * The PairHMM implementation to use for -glm INDEL genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime. */ @Argument(fullName = "pair_hmm_implementation", shortName = "pairHMM", doc = "The PairHMM implementation to use for -glm INDEL genotype likelihood calculations", required = false) - public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.ORIGINAL; + public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING; /** * The minimum confidence needed in a given base for it to be used in variant calling. 
Note that the base quality of a base diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java index 7b444c4bd..c77557da6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java @@ -54,6 +54,7 @@ import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.pairhmm.Log10PairHMM; +import org.broadinstitute.sting.utils.pairhmm.LoglessPairHMM; import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; @@ -116,12 +117,11 @@ public class PairHMMIndelErrorModel { case ORIGINAL: pairHMM = new Log10PairHMM(false); break; - case LOGLESS_CACHING: //TODO: still not tested so please do not use yet - //pairHMM = new LoglessCachingPairHMM(); //TODO - add it back when the figure out how to use the protected LoglessCachingPairHMM class - throw new UserException.BadArgumentValue("pairHMM"," this option (LOGLESS_CACHING in UG) is still under development"); - //break; + case LOGLESS_CACHING: + pairHMM = new LoglessPairHMM(); + break; default: - throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the UnifiedGenotyper. Acceptable options are ORIGINAL, EXACT or LOGLESS_CACHING (the third option is still under development)."); + throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the UnifiedGenotyper. 
Acceptable options are ORIGINAL, EXACT or LOGLESS_CACHING."); } // fill gap penalty table, affine naive model: diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java index 2d36a27d1..aaa3b1284 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java @@ -79,6 +79,6 @@ public class UnifiedGenotyperGeneralPloidySuite1IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy1_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "dd28b14d732852bffbba4f22f7697227"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "98f4d78aad745c6e853b81b2e4e207b4"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java index 117e54ef8..0eb89adc7 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java @@ -58,7 +58,7 @@ public class UnifiedGenotyperGeneralPloidySuite2IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy3_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","369ad0ff28bb9ce7974dc2c7343c8737"); + 
executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","25902d7a6a0c00c60c2d5845dfaa1a4c"); } @Test(enabled = true) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java index 49d429c0d..65a569cdc 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java @@ -136,7 +136,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L " + result.get(0).getAbsolutePath(), 1, - Arrays.asList("587bf6bad368ed81189747a84f913ab2")); + Arrays.asList("5596851d19582dd1af3901b7d703ae0a")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index 439039f9b..1bfbbac17 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -96,7 +96,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultipleSNPAlleles() { WalkerTest.WalkerTestSpec spec = new 
WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1, - Arrays.asList("70a21812d4dd6b72c44f60c74d508d5b")); + Arrays.asList("06c85e8eab08b67244cf38fc785aca22")); executeTest("test Multiple SNP alleles", spec); } From 15171c07a85254ead28bfc75c5ffe9203378306f Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 19 Jun 2013 11:10:36 -0400 Subject: [PATCH 74/99] CatVariants accepts reference files ending in any standard extension -- [resolves #49339235] Make CatVariants accept reference files ending in .fa (not only .fasta) --- .../broadinstitute/sting/tools/CatVariants.java | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/tools/CatVariants.java b/public/java/src/org/broadinstitute/sting/tools/CatVariants.java index ad77b2548..b59786d15 100644 --- a/public/java/src/org/broadinstitute/sting/tools/CatVariants.java +++ b/public/java/src/org/broadinstitute/sting/tools/CatVariants.java @@ -144,15 +144,13 @@ public class CatVariants extends CommandLineProgram { BasicConfigurator.configure(); logger.setLevel(Level.INFO); - if ( ! refFile.getName().endsWith(".fasta")) { - throw new UserException("Reference file "+refFile+"name must end with .fasta"); + final ReferenceSequenceFile ref; + try { + ref = ReferenceSequenceFileFactory.getReferenceSequenceFile(refFile); + } catch ( Exception e ) { + throw new UserException("Couldn't load provided reference sequence file " + refFile, e); } - if ( ! 
refFile.exists() ) { - throw new UserException(String.format("Reference file %s does not exist", refFile.getAbsolutePath())); - } - - // Comparator>> comparator = new PositionComparator(); Comparator> positionComparator = new PositionComparator(); @@ -203,8 +201,6 @@ public class CatVariants extends CommandLineProgram { if (!(outputFile.getName().endsWith(".vcf") || outputFile.getName().endsWith(".VCF"))){ throw new UserException(String.format("Output file %s should be .vcf", outputFile)); } - ReferenceSequenceFile ref = ReferenceSequenceFileFactory.getReferenceSequenceFile(refFile); - FileOutputStream outputStream = new FileOutputStream(outputFile); EnumSet options = EnumSet.of(Options.INDEX_ON_THE_FLY); From af275fdf100f03c78f12e1944b979cbab33c0e69 Mon Sep 17 00:00:00 2001 From: Chris Hartl Date: Wed, 12 Jun 2013 13:54:30 -0400 Subject: [PATCH 75/99] Extend the documentation of GenotypeConcordance to include notes about Monomorphic and Filtered VCF records. Address Geraldine's comments - information on moltenization and explanation of fields Fix paren --- .../variantutils/GenotypeConcordance.java | 52 ++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java index 10397d718..da8b20c66 100755 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java @@ -67,8 +67,58 @@ import java.util.*; * *

Output

* Genotype Concordance writes a GATK report to the specified file (via -o) , consisting of multiple tables of counts - * and proportions. These tables may be optionally moltenized via the -moltenize argument. + * and proportions. These tables may be optionally moltenized via the -moltenize argument. That is, the standard table * + * Sample NO_CALL_HOM_REF NO_CALL_HET NO_CALL_HOM_VAR (...) + * NA12878 0.003 0.001 0.000 (...) + * NA12891 0.005 0.000 0.000 (...) + * + * would instead be displayed + * + * NA12878 NO_CALL_HOM_REF 0.003 + * NA12878 NO_CALL_HET 0.001 + * NA12878 NO_CALL_HOM_VAR 0.000 + * NA12891 NO_CALL_HOM_REF 0.005 + * NA12891 NO_CALL_HET 0.000 + * NA12891 NO_CALL_HOM_VAR 0.000 + * (...) + * + * + * These tables are constructed on a per-sample basis, and include counts of eval vs comp genotype states, and the + * number of times the alternate alleles between the eval and comp sample did not match up. + * + * In addition, Genotype Concordance produces site-level allelic concordance. For strictly bi-allelic VCFs, + * only the ALLELES_MATCH, EVAL_ONLY, TRUTH_ONLY fields will be populated, but where multi-allelic sites are involved + * counts for EVAL_SUBSET_TRUTH and EVAL_SUPERSET_TRUTH will be generated. + * + * For example, in the following situation + * eval: ref - A alt - C + * comp: ref - A alt - C,T + * then the site is tabulated as EVAL_SUBSET_TRUTH. Were the situation reversed, it would be EVAL_SUPERSET_TRUTH. + * However, in the case where eval has both C and T alternate alleles, both must be observed in the genotypes + * (that is, there must be at least one of (0/1,1/1) and at least one of (0/2,1/2,2/2) in the genotype field). If + * one of the alleles has no observations in the genotype fields of the eval, the site-level concordance is + * tabulated as though that allele were not present in the record. + * + *

Monomorphic Records

+ * A site which has an alternate allele, but which is monomorphic in samples, is treated as not having been + * discovered, and will be recorded in the TRUTH_ONLY column (if a record exists in the comp VCF), or not at all + * (if no record exists in the comp VCF). + * + * That is, in the situation + * eval: ref - A alt - C genotypes - 0/0 0/0 0/0 ... 0/0 + * comp: ref - A alt - C ... 0/0 0/0 ... + * is equivalent to + * eval: ref - A alt - . genotypes - 0/0 0/0 0/0 ... 0/0 + * comp: ref - A alt - C ... 0/0 0/0 ... + * + * When a record is present in the comp VCF the *genotypes* for the monomorphic site will still be used to evaluate + * per-sample genotype concordance counts. + * + *

Filtered Records

+ * Filtered records are treated as though they were not present in the VCF, unless -ignoreSiteFilters is provided, + * in which case all records are used. There is currently no way to assess concordance metrics on filtered sites + * exclusively. SelectVariants can be used to extract filtered sites, and VariantFiltration used to un-filter them. */ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} ) public class GenotypeConcordance extends RodWalker>,ConcordanceMetrics> { From 0be788f0f9ab0212d8a9eb91f995502ebe1d2b62 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Wed, 19 Jun 2013 13:15:24 -0400 Subject: [PATCH 76/99] Fix typo in snpEff documentation --- .../org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java index 288196d1b..8c068d3e4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java @@ -49,7 +49,7 @@ import java.util.regex.Pattern; * *

See http://snpeff.sourceforge.net/ for more information on the SnpEff tool

. * - *

For each variant, this tol chooses one of the effects of highest biological impact from the SnpEff + *

For each variant, this tool chooses one of the effects of highest biological impact from the SnpEff * output file (which must be provided on the command line via --snpEffFile filename.vcf), * and adds annotations on that effect.

* From 23ee192d5ef969675b3cf1d5396fb9bb3353f4f8 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Wed, 19 Jun 2013 13:22:44 -0400 Subject: [PATCH 77/99] PrintReads: remove -ds argument -This argument was completely redundant with the engine-level -dfrac argument. -Could produce unintended consequences if used in conjunction with engine-level downsampling arguments. --- .../sting/gatk/walkers/readutils/PrintReads.java | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java index a28523369..c7ed0bffd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java @@ -96,7 +96,7 @@ import java.util.*; * -T PrintReads \ * -o output.bam \ * -I input.bam \ - * -ds 0.25 + * -dfrac 0.25 * * */ @@ -124,12 +124,6 @@ public class PrintReads extends ReadWalker impleme @Argument(fullName = "number", shortName = "n", doc="Print the first n reads from the file, discarding the rest", required = false) int nReadsToPrint = -1; - /** - * Downsamples the bam file by the given ratio, printing only approximately the given percentage of reads. The downsampling is balanced (over the entire coverage) - */ - @Argument(fullName = "downsample_coverage", shortName = "ds", doc="Downsample BAM to desired coverage", required = false) - public double downsampleRatio = 1.0; - /** * Only reads from samples listed in the provided file(s) will be included in the output. */ @@ -237,8 +231,7 @@ public class PrintReads extends ReadWalker impleme nReadsToPrint--; // n > 0 means there are still reads to be printed. } - // if downsample option is turned off (= 1) then don't waste time getting the next random number. 
- return (downsampleRatio == 1 || random.nextDouble() < downsampleRatio); + return true; } /** From 51ec5404d4a50f6eb34915ad3f1f3016265f3ad0 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Tue, 18 Jun 2013 16:04:29 -0400 Subject: [PATCH 78/99] SAMDataSource: always consolidate cigar strings into canonical form -Collapses zero-length and repeated cigar elements, neither of which can necessarily be handled correctly by downstream code (like LIBS). -Consolidation is done before read filters, because not all read filters behave correctly with non-consolidated cigars. -Examined other uses of consolidateCigar() throughout the GATK, and found them to not be redundant with the new engine-level consolidation (they're all on artificially-created cigars in the HaplotypeCaller and SmithWaterman classes) -Improved comments in SAMDataSource.applyDecoratingIterators() -Updated MD5s; differences were examined and found to be innocuous -Two tests: -Unit test for ReadFormattingIterator -Integration test for correct handling of zero-length cigar elements by the GATK engine as a whole --- .../gatk/datasources/reads/SAMDataSource.java | 20 ++++---- .../iterators/ReadFormattingIterator.java | 5 ++ .../gatk/EngineFeaturesIntegrationTest.java | 22 ++++++++ .../ReadFormattingIteratorUnitTest.java | 50 +++++++++++++++++++ .../gatk/walkers/BAQIntegrationTest.java | 2 +- .../readutils/PrintReadsIntegrationTest.java | 6 +-- 6 files changed, 91 insertions(+), 14 deletions(-) create mode 100644 public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java index 2f934e8df..a36667ec4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java @@ -625,16 +625,15 @@ 
public class SAMDataSource { byte defaultBaseQualities, boolean isLocusBasedTraversal ) { - // ************************************************************************************************ // - // * NOTE: ALL FILTERING/DOWNSAMPLING SHOULD BE DONE BEFORE ANY ITERATORS THAT MODIFY THE READS! * // - // * (otherwise we will process something that we may end up throwing away) * // - // ************************************************************************************************ // + // Always apply the ReadFormattingIterator before both ReadFilters and ReadTransformers. At a minimum, + // this will consolidate the cigar strings into canonical form. This has to be done before the read + // filtering, because not all read filters will behave correctly with things like zero-length cigar + // elements. If useOriginalBaseQualities is true or defaultBaseQualities >= 0, this iterator will also + // modify the base qualities. + wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - if (useOriginalBaseQualities || defaultBaseQualities >= 0) - // only wrap if we are replacing the original qualities or using a default base quality - wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - - // Filters: + // Read Filters: these are applied BEFORE downsampling, so that we downsample within the set of reads + // that actually survive filtering. Otherwise we could get much less coverage than requested. 
wrappedIterator = StingSAMIteratorAdapter.adapt(new CountingFilteringIterator(readMetrics,wrappedIterator,supplementalFilters)); // Downsampling: @@ -658,7 +657,8 @@ public class SAMDataSource { if (!noValidationOfReadOrder && enableVerification) wrappedIterator = new VerifyingSamIterator(wrappedIterator); - // set up read transformers + // Read transformers: these are applied last, so that we don't bother transforming reads that get discarded + // by the read filters or downsampler. for ( final ReadTransformer readTransformer : readTransformers ) { if ( readTransformer.enabled() && readTransformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_INPUT ) wrappedIterator = new ReadTransformingIterator(wrappedIterator, readTransformer); diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java index c3b4aaa0a..f9d2f4802 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.iterators; import net.sf.samtools.SAMRecord; import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; /** * An iterator which does post-processing of a read, including potentially wrapping @@ -104,6 +105,10 @@ public class ReadFormattingIterator implements StingSAMIterator { public SAMRecord next() { SAMRecord rec = wrappedIterator.next(); + // Always consolidate the cigar string into canonical form, collapsing zero-length / repeated cigar elements. + // Downstream code (like LocusIteratorByState) cannot necessarily handle non-consolidated cigar strings. + rec.setCigar(AlignmentUtils.consolidateCigar(rec.getCigar())); + // if we are using default quals, check if we need them, and add if necessary. // 1. 
we need if reads are lacking or have incomplete quality scores // 2. we add if defaultBaseQualities has a positive value diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index fe30b60fd..c97ab7301 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -25,6 +25,8 @@ package org.broadinstitute.sting.gatk; +import net.sf.samtools.SAMFileReader; +import net.sf.samtools.SAMRecord; import net.sf.samtools.util.BlockCompressedInputStream; import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; @@ -39,6 +41,7 @@ import org.broadinstitute.sting.gatk.walkers.qc.ErrorThrowing; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; import org.broadinstitute.variant.vcf.VCFCodec; import org.broadinstitute.variant.vcf.VCFHeader; import org.broadinstitute.variant.vcf.VCFHeaderLine; @@ -255,4 +258,23 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { executeTest("testDefaultBaseQualitiesNoneProvided", testDefaultBaseQualities(null, "")); } + @Test + public void testGATKEngineConsolidatesCigars() { + final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" + + " -R " + b37KGReference + + " -I " + privateTestDir + "zero_length_cigar_elements.bam" + + " -o %s", + 1, Arrays.asList("")); // No MD5s; we only want to check the cigar + + final File outputBam = executeTest("testGATKEngineConsolidatesCigars", spec).first.get(0); + final SAMFileReader reader = new SAMFileReader(outputBam); + reader.setValidationStringency(SAMFileReader.ValidationStringency.SILENT); + 
reader.setSAMRecordFactory(new GATKSamRecordFactory()); + + final SAMRecord read = reader.iterator().next(); + reader.close(); + + // Original cigar was 0M3M0M8M. Check that it's been consolidated after running through the GATK engine: + Assert.assertEquals(read.getCigarString(), "11M", "Cigar 0M3M0M8M not consolidated correctly by the engine"); + } } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java new file mode 100644 index 000000000..5d037bc4b --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.iterators; + +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Arrays; + + +public class ReadFormattingIteratorUnitTest extends BaseTest { + + @Test + public void testIteratorConsolidatesCigars() { + final Cigar unconsolidatedCigar = TextCigarCodec.getSingleton().decode("3M0M5M0M"); + final SAMRecord unconsolidatedRead = ArtificialSAMUtils.createArtificialRead(unconsolidatedCigar); + + final StingSAMIterator readIterator = StingSAMIteratorAdapter.adapt(Arrays.asList(unconsolidatedRead).iterator()); + final ReadFormattingIterator formattingIterator = new ReadFormattingIterator(readIterator, false, (byte)-1); + final SAMRecord postIterationRead = formattingIterator.next(); + + Assert.assertEquals(postIterationRead.getCigarString(), "8M", "Cigar 3M0M5M0M not consolidated correctly by ReadFormattingIterator"); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java index 6b0422c6a..604c0e377 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java @@ -43,7 +43,7 @@ public class BAQIntegrationTest extends WalkerTest { // -------------------------------------------------------------------------------------------------------------- @Test public void testPrintReadsNoBAQ() { - WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF", 1, Arrays.asList("11af64ba020262d06b490bae2c5e08f8")); + WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF", 1, Arrays.asList("d1f74074e718c82810512bf40dbc7f72")); executeTest(String.format("testPrintReadsNoBAQ"), spec); } diff --git 
a/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java index 7482eae60..adc7ad765 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java @@ -59,10 +59,10 @@ public class PrintReadsIntegrationTest extends WalkerTest { {new PRTest(hg18Reference, "HiSeq.1mb.bam", " -simplifyBAM", "1510dc4429f3ed49caf96da41e8ed396")}, {new PRTest(hg18Reference, "HiSeq.1mb.bam", " -n 10", "0e3d1748ad1cb523e3295cab9d09d8fc")}, // See: GATKBAMIndex.getStartOfLastLinearBin(), BAMScheduler.advance(), IntervalOverlapFilteringIterator.advance() - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", "", "e1cac555f3d720f611c47eec93e84bd9")}, - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1", "6e2558317d409195eab3006dc9e5524c")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", "", "d7f23fd77d7dc7cb50d3397f644c6d8a")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1", "c601db95b20248d012b0085347fcb6d1")}, {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L unmapped", "2d32440e47e8d9d329902fe573ad94ce")}, - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1 -L unmapped", "6e2558317d409195eab3006dc9e5524c")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1 -L unmapped", "c601db95b20248d012b0085347fcb6d1")}, {new PRTest(b37KGReference, "oneReadAllInsertion.bam", "", "349650b6aa9e574b48a2a62627f37c7d")}, {new PRTest(b37KGReference, "NA12878.1_10mb_2_10mb.bam", "", "0c1cbe67296637a85e80e7a182f828ab")} }; From 08f92bb6f9ab7d74e82455a95a73aea9a0d603a1 Mon Sep 17 00:00:00 2001 From: Valentin Ruano-Rubio Date: Thu, 13 Jun 2013 18:38:11 -0400 
Subject: [PATCH 79/99] Added AnalyzeCovariates tool to generate BQSR assessment quality plots. Implemtation details: * Added tool class *.AnalyzeCovariates * Added convenient addAll method to Utils to be able to add elements of an array. * Added parameter comparison methods to RecalibrationArgumentCollection class in order to verify that multiple imput recalibration report are compatible and comparable. * Modified the BQSR.R script to handle up to 3 different recalibration tables (-BQSR, -before and -after) and removed some irrelevant arguments (or argument values) from the output. * Added an integration test class. --- .../gatk/walkers/bqsr/AnalyzeCovariates.java | 583 ++++++++++++++++++ .../sting/gatk/walkers/bqsr/BQSRGatherer.java | 4 +- .../gatk/walkers/bqsr/BaseRecalibrator.java | 8 +- .../bqsr/RecalibrationArgumentCollection.java | 146 ++++- .../sting/utils/recalibration/RecalUtils.java | 194 +++++- .../recalibration/RecalibrationReport.java | 5 + .../covariates/ContextCovariate.java | 2 + .../AnalyzeCovariatesIntegrationTest.java | 362 +++++++++++ .../sting/utils/recalibration/BQSR.R | 36 +- .../org/broadinstitute/sting/utils/Utils.java | 30 + 10 files changed, 1341 insertions(+), 29 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java create mode 100644 protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java new file mode 100644 index 000000000..b6f911753 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java @@ -0,0 +1,583 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This 
Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ +package org.broadinstitute.sting.gatk.walkers.bqsr; + +import com.google.java.contract.Requires; +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Input; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.RodWalker; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.recalibration.RecalUtils; +import org.broadinstitute.sting.utils.recalibration.RecalibrationReport; +import org.broadinstitute.sting.utils.recalibration.BaseRecalibration; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; + + +/** + * Tool to analyze and evaluate base recalibration ables. + *

+ * For now it generates a plot report to assess the quality of a recalibration. + * + *

Input

+ * + * The tool can take up to three different sets of recalibration tables. + * The resulting plots will be overlaid on top of each other to make + * comparisons easy. + * + * + * + * + * + * + * + * + * + * + * + * + * + *
SetArgumentLabelColorDescription
Original-beforeBEFOREMaroon1First pass recalibration + * tables obtained from applying {@link BaseRecalibration} + * on the original alignment.
Recalibrated-afterAFTERBlueSecond pass recalibration tables + * results from the application of {@link BaseRecalibration} + * on the alignment recalibrated using the first pass tables
Input-BQSRBQSRBlackAny recalibration table without a specific role
+ *
+ * + * You need to specify one set at least. Multiple sets need to have the same values for the following parameters: + *

+ * covariate (order is not important), no_standard_covs, run_without_dbsnp, solid_recal_mode, + * solid_nocall_strategy, mismatches_context_size, mismatches_default_quality, deletions_default_quality, + * insertions_default_quality, maximum_cycle_value, low_quality_tail, default_platform, force_platform, + * quantizing_levels and binary_tag_name + *

Output

+ * + * Currently this tool generates two outputs: + * + *
+ *
-plots my-report.pdf
+ *
A pdf document that encloses plots to assess the quality of the recalibration.
+ *
-csv my-report.csv
+ *
A csv file that contains a table with all the data required to generate those plots.
+ *
+ * + * You need to specify at least one of them. + * + *

Other Arguments

+ * + *

-ignoreLMT, --ignoreLastModificationTimes

+ * + * when set, no warning message will be displayed in the -before recalibration table file is older than the -after one. + * + *

Examples

+ * + * + *

Plot a single recalibration table

+ *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -BQSR myrecal.table \
+ *      -plots BQSR.pdf
+ * 
+ * + *

Plot before (first pass) and after (second pass) recalibration table to compare them

+ * + *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -before recal2.table \
+ *      -after recal3.table \
+ *      -plots recalQC.pdf
+ * 
+ * + *

Plot up to three recalibration tables for comparison

+ * + *
+ *
+ * # You can ignore the before/after semantics completely if you like (if you do add -ignoreLMT
+ * # to avoid a possible warning), but all tables should have been generated using the same parameters.
+ *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -ignoreLMT \
+ *      -BQSR recal1.table \   # you can discard any two
+ *      -before recal2.table \
+ *      -after recal3.table \
+ *      -plots myrecals.pdf
+ * 
+ * + *

Full BQSR quality assessment pipeline

+ * + *
+ * # Generate the first pass recalibration table file.
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T BaseRecalibrator \
+ *      -R myreference.fasta \
+ *      -I myinput.bam \
+ *      -knownSites bundle/my-trusted-snps.vcf \ # optional but recommended
+ *      -knownSites bundle/my-trusted-indels.vcf \ # optional but recommended
+ *      ... other options
+ *      -o firstpass.table
+ *
+ * # Generate the second pass recalibration table file.
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T BaseRecalibrator \
+ *      -BQSR firstpass.table \
+ *      -R myreference.fasta \
+ *      -I myinput.bam \
+ *      -knownSites bundle/my-trusted-snps.vcf \
+ *      -knownSites bundle/my-trusted-indels.vcf \
+ *      ... other options \
+ *      -o secondpass.table
+ *
+ * # Finally generate the plots report and also keep a copy of the csv (optional).
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -before firstpass.table \
+ *      -after secondpass.table \
+ *      -csv BQSR.csv \ # optional
+ *      -plots BQSR.pdf
+ * 
+ * + * @author Valentin Ruano-Rubio <valentin@broadinstitute.org> + * @version 6/16/2013 + * @since 2.6 + */ +public final class AnalyzeCovariates extends RodWalker { + + + // Constants on option short names that are used in some error/warning messages: + + static final String CSV_ARG_SHORT_NAME = "csv"; + static final String PDF_ARG_SHORT_NAME = "plots"; + static final String BEFORE_ARG_SHORT_NAME = "before"; + static final String AFTER_ARG_SHORT_NAME = "after"; + + /** + * File containing the recalibration tables from the first pass. + */ + @Input(shortName=BEFORE_ARG_SHORT_NAME,fullName="beforeReportFile", doc = "file containing the BQSR first-pass report file",required = false) + protected File beforeFile = null; + + /** + * File containing the recalibration tables from the second pass. + */ + @Input(shortName=AFTER_ARG_SHORT_NAME, fullName="afterReportFile", doc = "file containing the BQSR second-pass report file",required = false) + protected File afterFile = null; + + /** + * If true, it won't show a warning if the last-modification time of the before and after input files suggest that they have been reversed. + */ + @Argument(shortName="ignoreLMT", fullName="ignoreLastModificationTimes", doc= "do not emit warning messages related to suspicious last modification time order of inputs", required = false) + protected boolean ignoreLastModificationTime = false; + + /** + * Output report file name. + */ + @Output(shortName=PDF_ARG_SHORT_NAME, fullName="plotsReportFile" ,doc = "location of the output report", required = false) + protected File pdfFile = null; + + /** + * Output csv file name. + */ + @Output(shortName=CSV_ARG_SHORT_NAME,fullName="intermediateCsvFile" ,doc = "location of the csv intermediate file", required = false) + protected File csvFile = null; + + /** + * Convenience reference to the RECAL_BQSR_FILE argument value. + *

+ * This field value is resolved by {@link #initialize()}. + */ + protected File bqsrFile = null; + + /** + * Checks inputs and argument values. + *

+ * Notice that this routine will not validate the content of files. It may have some minor side effects as + * the output of warning messages back to the user. + * + * @throw IllegalStateException there is some required argument value that has not been loaded yet. + * @throw UserException if there is some error caused by or under the end user's control. + */ + private void checkArgumentsValues() { + checkInputReportFile("BQSR",bqsrFile); + checkInputReportFile("before",beforeFile); + checkInputReportFile("after",afterFile); + if (bqsrFile == null && beforeFile == null && afterFile == null) { + throw new UserException("you must provide at least one recalibration report file " + + "(arguments -BQSR, -" + BEFORE_ARG_SHORT_NAME + " or -" + AFTER_ARG_SHORT_NAME); + } + + checkOutputFile(PDF_ARG_SHORT_NAME,pdfFile); + checkOutputFile(CSV_ARG_SHORT_NAME, csvFile); + checkInputReportFileLMT(beforeFile,afterFile); + checkOutputRequested(); + } + + /** + * Checks whether the last-modification-time of the inputs is consistent with their relative roles. + * + * This routine does not thrown an exception but may output a warning message if inconsistencies are spotted. + * + * @param beforeFile the before report file. + * @param afterFile the after report file. + */ + private void checkInputReportFileLMT(final File beforeFile, final File afterFile) { + + if (ignoreLastModificationTime || beforeFile == null || afterFile == null) { + return; // nothing to do here + } else if (beforeFile.lastModified() > afterFile.lastModified()) { + Utils.warnUser("Last modification timestamp for 'Before' and 'After'" + + "recalibration reports are in the wrong order. Perhaps, have they been swapped?"); + } + } + + /** + * Checks that at least one output was requested. + * + * @throw UserException if no output was requested. 
+ */ + private void checkOutputRequested() { + if (pdfFile == null && csvFile == null) { + throw new UserException("you need to request at least one output:" + + " the intermediate csv file (-" + CSV_ARG_SHORT_NAME + " FILE)" + + " or the final plot file (-" + PDF_ARG_SHORT_NAME + " FILE)."); + } + } + + /** + * Checks the value provided to input file arguments. + * + * @throw UserException if there is any problem cause by or under the end user's control + * + * @param name command line argument short name. + * @param value the argument value. + */ + private void checkInputReportFile(final String name,final File value) { + if (value == null) { + return; + } else if (!value.exists()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' does not exist or is unreachable"); + } else if (!value.isFile()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' is not a regular file"); + } else if (!value.canRead()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' cannot be read"); + } + } + + /** + * Checks the value provided for output arguments. + * + * @throw UserException if there is any problem cause by or under the end user's control + * + * @param name command line argument short name. + * @param value the argument value. 
+ */ + private void checkOutputFile(final String name, final File value) { + if (value == null) { + return; + } + if (value.exists() && !value.isFile()) { + throw new UserException.BadArgumentValue(name, "the output file location '" + + value + "' exists as not a file"); + } + final File parent = value.getParentFile(); + if (parent == null) { + return; + } + if (!parent.exists()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' does not exists or is unreachable"); + } else if (!parent.isDirectory()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' is not a directory"); + } else if (!parent.canWrite()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' cannot be written"); + } + + } + + /** + * Generates the plots using the external R script. + * + *

+ * If plotsFile is null, it does not perform any plotting. + * + * @param csvFile the intermediary csv file. + * @param plotsFile the output plot location. + */ + private void generatePlots(final File csvFile, final Map reportFiles, final File plotsFile) { + + if (plotsFile == null) { + return; + } + logger.info("Generating plots file '" + plotsFile + "'"); + final File exampleReportFile = reportFiles.values().iterator().next(); + RecalUtils.generatePlots(csvFile,exampleReportFile,plotsFile); + } + + @Override + public void initialize() { + super.initialize(); + bqsrFile = getToolkit().getArguments().BQSR_RECAL_FILE; + checkArgumentsValues(); + final Map reportFiles = buildReportFileMap(); + final Map reports = buildReportMap(reportFiles); + checkReportConsistency(reports); + final File csvFile = resolveCsvFile(); + generateCsvFile(csvFile,reports); + final File plotFile = resolvePlotFile(); + generatePlots(csvFile, reportFiles, plotFile); + } + + /** + * Returns the plot output file + * @return might be null if the user has not indicated and output file. + */ + private File resolvePlotFile() { + return pdfFile; + } + + /** + * Generates the intermediary Csv file. + * + * @param csvFile where to write the file. + * @param reports the reports to be included. + */ + private void generateCsvFile(final File csvFile, final Map reports) { + try { + logger.info("Generating csv file '" + csvFile + "'"); + RecalUtils.generateCsv(csvFile, reports); + } catch (FileNotFoundException e) { + throw new UserException( + String.format("There is a problem creating the intermediary Csv file '%s': %s", + csvFile,e.getMessage()),e); + } + } + + /** + * Checks whether multiple input recalibration report files argument values are consistent (equal). + * + * @param reports map with report to verify. + * + * @throw UserException if there is any inconsistency. 
+ */ + private void checkReportConsistency(final Map reports) { + final Map.Entry[] reportEntries = + reports.entrySet().toArray((Map.Entry[]) new Map.Entry[reports.size()]); + + final Map.Entry exampleEntry = reportEntries[0]; + + for (int i = 1; i < reportEntries.length; i++) { + final Map diffs = exampleEntry.getValue().getRAC().compareReportArguments( + reportEntries[i].getValue().getRAC(),exampleEntry.getKey(),reportEntries[i].getKey()); + if (diffs.size() != 0) { + throw new UserException("There are differences in relevant arguments of" + + " two or more input recalibration reports. Please make sure" + + " they have been created using the same recalibration parameters." + + " " + Utils.join("// ", reportDifferencesStringArray(diffs))); + } + } + } + + + /** + * Creates a map with all input recalibration files indexed by their "role". + *

+ * The key is the role and the value the corresponding report file. + *

+ * Roles: "Before" (recalibration), "After" (recalibration), "BQSR" (the tool standard argument recalibration file) + * + * @return never null + */ + private Map buildReportFileMap() { + final Map reports = new LinkedHashMap<>(3); + if (bqsrFile != null) { + reports.put("BQSR",bqsrFile); + } + if (beforeFile != null) { + reports.put("Before",beforeFile); + } + if (afterFile != null) { + reports.put("After",afterFile); + } + return reports; + } + + /** + * Transforms a recalibration file map into a report object map. + * + * @param reportFileMap the file map to transforms. + * @return never null, a new map with the same size as + * reportFileMap and the same key set. + */ + @Requires("reportFileMap != null") + private Map buildReportMap(final Map reportFileMap) { + final Map reports = new LinkedHashMap<>(reportFileMap.size()); + for (final Map.Entry e : reportFileMap.entrySet()) { + reports.put(e.getKey(),new RecalibrationReport(e.getValue())); + } + return reports; + } + + /** + * Generates a flatter String array representation of recalibration argument differences. + * @param diffs the differences to represent. + * + * @return never null, an array of the same length as the size of the input diffs. + */ + @Requires("diffs != null") + private String[] reportDifferencesStringArray(final Map diffs) { + final String[] result = new String[diffs.size()]; + int i = 0; + for (final Map.Entry e : diffs.entrySet()) { + result[i++] = capitalize(e.getKey()) + ": " + e.getValue(); + } + return result; + } + + /** + * Returns the input string capitalizing the first letter. + * + * @param str the string to capitalize + * @return never null. + */ + @Requires("str != null") + private String capitalize(final String str) { + if (str.isEmpty()) { + return str; + } else { + return Character.toUpperCase(str.charAt(0)) + str.substring(1); + } + } + + /** + * Returns the csv file to use. + *

+ * This is the the one specified by the user if any or a temporary file + * that will be deleted as soon as the VM exists by default. + * + * @return never null. + */ + private File resolveCsvFile() { + if (csvFile != null) { + return csvFile; + } else { + try { + final File result = File.createTempFile("AnalyzeCovariates", ".csv"); + result.deleteOnExit(); + return result; + } catch (IOException e) { + throw new UserException("Could not create temporary Csv file",e); + } + } + } + + /** + * Always return true, forcing the immediate termination of the travesal. + * @return + */ + @Override + public boolean isDone() { + return true; + } + + /** + * {@inheritDoc} + */ + @Override + public None reduceInit() { + return new None(); + } + + /** + * Is not supposed to ever be called, thus it always results in an exception. + * + * @throws IllegalStateException always. + */ + @Override + public None reduce(None value, None sum) { + throw new IllegalStateException("AnalyzeCovariates reduce method is not supposed to be invoked ever"); + } + + + /** + * Is not supposed to ever be called, thus it always results in an exception. + * + * @throws IllegalStateException always. + */ + @Override + public None map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + throw new IllegalStateException("AnalyzeCovariates map method is not supposed to be invoked ever"); + } + + /** + * Dummy map and reduce types for the {@link AnalyzeCovariates} tool that in fact does not do any traversal. 
+ */ + protected static class None { + private None() { + } + } +} + + diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java index ad97dc008..7727c2dac 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java @@ -97,10 +97,10 @@ public class BQSRGatherer extends Gatherer { RAC.RECAL_TABLE_FILE = output; if ( RAC.existingRecalibrationReport != null ) { final RecalibrationReport originalReport = new RecalibrationReport(RAC.existingRecalibrationReport); - RecalUtils.generateRecalibrationPlot(RAC, originalReport.getRecalibrationTables(), generalReport.getRecalibrationTables(), generalReport.getCovariates()); + RecalUtils.generateRecalibrationPlot(RAC, originalReport.getRecalibrationTables(), generalReport.getRecalibrationTables(), generalReport.getRequestedCovariates()); } else { - RecalUtils.generateRecalibrationPlot(RAC, generalReport.getRecalibrationTables(), generalReport.getCovariates()); + RecalUtils.generateRecalibrationPlot(RAC, generalReport.getRecalibrationTables(), generalReport.getRequestedCovariates()); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java index c60eceaa4..41d3f3991 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java @@ -61,6 +61,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.baq.BAQ; 
import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.collections.Pair; @@ -124,7 +125,7 @@ import java.util.List; * -R resources/Homo_sapiens_assembly18.fasta \ * -knownSites bundle/hg18/dbsnp_132.hg18.vcf \ * -knownSites another/optional/setOfSitesToMask.vcf \ - * -o recal_data.grp + * -o recal_data.table * */ @@ -179,6 +180,11 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche public void initialize() { baq = new BAQ(BAQGOP); // setup the BAQ object with the provided gap open penalty + if (RAC.RECAL_PDF_FILE != null) { + Utils.warnUser("This is not the recommended way to generate recalibration plots any longer and will be" + + " discontinued soon in future releases. Please use the 'AnalyzeCovariates' tool instead from now one"); + } + if (RAC.FORCE_PLATFORM != null) RAC.DEFAULT_PLATFORM = RAC.FORCE_PLATFORM; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java index 5a2cdc7a6..c1ecb2320 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java @@ -46,15 +46,17 @@ package org.broadinstitute.sting.gatk.walkers.bqsr; +import com.google.java.contract.Requires; import org.broad.tribble.Feature; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.report.GATKReportTable; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.StingException; import org.broadinstitute.sting.utils.recalibration.RecalUtils; import java.io.File; import java.io.PrintStream; -import java.util.Collections; -import java.util.List; +import java.util.*; /** * Created by IntelliJ IDEA. 
@@ -65,7 +67,7 @@ import java.util.List; * This set of arguments will also be passed to the constructor of every Covariate when it is instantiated. */ -public class RecalibrationArgumentCollection { +public class RecalibrationArgumentCollection implements Cloneable { /** * This algorithm treats every reference mismatch as an indication of error. However, real genetic variation is expected to mismatch the reference, @@ -289,4 +291,142 @@ public class RecalibrationArgumentCollection { return argumentsTable; } + /** + * Returns a map with the arguments that differ between this an + * another {@link RecalibrationArgumentCollection} instance. + *

+ * The key is the name of that argument in the report file. The value is a message + * that explains the difference to the end user. + *

+ * Thus, an empty map indicates that there are no differences between both argument collections that + * are relevant to report comparison. + *

+ * This method should not throw any exception. + * + * @param other the argument-collection to compare against. + * @param thisRole the name used to refer to this RAC report that makes sense to the end user. + * @param otherRole the name used to refer to the other RAC report that makes sense to the end user. + * + * @return never null, but a zero-size collection if there are no differences. + */ + @Requires("other != null && thisRole != null && otherRole != null && !thisRole.equalsIgnoreCase(otherRole)") + Map compareReportArguments(final RecalibrationArgumentCollection other,final String thisRole, final String otherRole) { + final Map result = new LinkedHashMap<>(15); + compareRequestedCovariates(result, other, thisRole, otherRole); + compareSimpleReportArgument(result,"no_standard_covs", DO_NOT_USE_STANDARD_COVARIATES, other.DO_NOT_USE_STANDARD_COVARIATES, thisRole, otherRole); + compareSimpleReportArgument(result,"run_without_dbsnp",RUN_WITHOUT_DBSNP,other.RUN_WITHOUT_DBSNP,thisRole,otherRole); + compareSimpleReportArgument(result,"solid_recal_mode", SOLID_RECAL_MODE, other.SOLID_RECAL_MODE,thisRole,otherRole); + compareSimpleReportArgument(result,"solid_nocall_strategy", SOLID_NOCALL_STRATEGY, other.SOLID_NOCALL_STRATEGY,thisRole,otherRole); + compareSimpleReportArgument(result,"mismatches_context_size", MISMATCHES_CONTEXT_SIZE,other.MISMATCHES_CONTEXT_SIZE,thisRole,otherRole); + compareSimpleReportArgument(result,"mismatches_default_quality", MISMATCHES_DEFAULT_QUALITY, other.MISMATCHES_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"deletions_default_quality", DELETIONS_DEFAULT_QUALITY, other.DELETIONS_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"insertions_default_quality", INSERTIONS_DEFAULT_QUALITY, other.INSERTIONS_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"maximum_cycle_value", MAXIMUM_CYCLE_VALUE, other.MAXIMUM_CYCLE_VALUE,thisRole,otherRole); + 
compareSimpleReportArgument(result,"low_quality_tail", LOW_QUAL_TAIL, other.LOW_QUAL_TAIL,thisRole,otherRole); + compareSimpleReportArgument(result,"default_platform", DEFAULT_PLATFORM, other.DEFAULT_PLATFORM,thisRole,otherRole); + compareSimpleReportArgument(result,"force_platform", FORCE_PLATFORM, other.FORCE_PLATFORM,thisRole,otherRole); + compareSimpleReportArgument(result,"quantizing_levels", QUANTIZING_LEVELS, other.QUANTIZING_LEVELS,thisRole,otherRole); + compareSimpleReportArgument(result,"binary_tag_name", BINARY_TAG_NAME, other.BINARY_TAG_NAME,thisRole,otherRole); + return result; + } + + + /** + * Compares the covariate report lists. + * + * @param diffs map where to annotate the difference. + * @param other the argument collection to compare against. + * @param thisRole the name for this argument collection that makes sense to the user. + * @param otherRole the name for the other argument collection that makes sense to the end user. + * + * @return true if a difference was found. + */ + @Requires("diffs != null && other != null && thisRole != null && otherRole != null") + private boolean compareRequestedCovariates(final Map diffs, + final RecalibrationArgumentCollection other, final String thisRole, final String otherRole) { + + final Set beforeNames = new HashSet<>(this.COVARIATES.length); + final Set afterNames = new HashSet<>(other.COVARIATES.length); + Utils.addAll(beforeNames, this.COVARIATES); + Utils.addAll(afterNames,other.COVARIATES); + final Set intersect = new HashSet<>(Math.min(beforeNames.size(),afterNames.size())); + intersect.addAll(beforeNames); + intersect.retainAll(afterNames); + + String diffMessage = null; + if (intersect.size() == 0) { // In practice this is not possible due to required covariates but... + diffMessage = String.format("There are no common covariates between '%s' and '%s'" + + " recalibrator reports. Covariates in '%s': {%s}. 
Covariates in '%s': {%s}.",thisRole,otherRole, + thisRole,Utils.join(", ",this.COVARIATES), + otherRole,Utils.join(",",other.COVARIATES)); + } else if (intersect.size() != beforeNames.size() || intersect.size() != afterNames.size()) { + beforeNames.removeAll(intersect); + afterNames.removeAll(intersect); + diffMessage = String.format("There are differences in the set of covariates requested in the" + + " '%s' and '%s' recalibrator reports. " + + " Exclusive to '%s': {%s}. Exclusive to '%s': {%s}.",thisRole,otherRole, + thisRole,Utils.join(", ",beforeNames), + otherRole,Utils.join(", ",afterNames)); + } + if (diffMessage != null) { + diffs.put("covariate",diffMessage); + return true; + } else { + return false; + } + } + + /** + * Annotates a map with any difference encountered in a simple value report argument that differs between this and + * another {@link RecalibrationArgumentCollection} instance. + *

+ * The key of the new entry would be the name of that argument in the report file. The value is a message + * that explains the difference to the end user. + *

+ * + *

+ * This method should not throw any exception. + * + * @param diffs where to annotate the differences. + * @param name the name of the report argument to compare. + * @param thisValue this argument collection value for that argument. + * @param otherValue the other collection value for that argument. + * @param thisRole the name used to refer to this RAC report that makes sense to the end user. + * @param otherRole the name used to refer to the other RAC report that makes sense to the end user. + * + * @param <T> the argument Object value type. + * + * @return true if a difference has been spotted, thus diff has been modified. + */ + private boolean compareSimpleReportArgument(final Map diffs, + final String name, final T thisValue, final T otherValue, final String thisRole, final String otherRole) { + if (thisValue == null && otherValue == null) { + return false; + } else if (thisValue != null && thisValue.equals(otherValue)) { + return false; + } else { + diffs.put(name, + String.format("differences between '%s' {%s} and '%s' {%s}.", + thisRole,thisValue == null ? "" : thisValue, + otherRole,otherValue == null ? "" : otherValue)); + return true; + } + + } + + /** + * Create a shallow copy of this argument collection. + * + * @return never null. 
+ */ + @Override + public RecalibrationArgumentCollection clone() { + try { + return (RecalibrationArgumentCollection) super.clone(); + } catch (CloneNotSupportedException e) { + throw new StingException("Unreachable code clone not supported thrown when the class " + + this.getClass().getName() + " is cloneable ",e); + } + } + } diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java index ae6b56e19..8908ce4a4 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java @@ -70,9 +70,7 @@ import org.broadinstitute.sting.utils.sam.GATKSAMReadGroupRecord; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import java.io.File; -import java.io.IOException; -import java.io.PrintStream; +import java.io.*; import java.util.*; /** @@ -223,6 +221,150 @@ public class RecalUtils { } } + /** + * Component used to print out csv representation of the reports that can be use to perform analysis in + * external tools. E.g. generate plots using R scripts. + *

+ * A header is always printed into the output stream (or file) when the printer is created. Then you only need + * to call {@link #print(RecalibrationReport,String) print} for each report you want to include in the csv file. + * Once finished, you close the printer calling {@link #close() close} + * + */ + private static class CsvPrinter { + + private final PrintStream ps; + private final Covariate[] covariates; + + /** + * Constructs a printer redirected to an output file. + * @param out the output file. + * @param c covariates to print out. + * @throws FileNotFoundException if the file could not be created anew. + */ + protected CsvPrinter(final File out, final Covariate ... c) + throws FileNotFoundException { + this(new FileOutputStream(out), c); + } + + /** + * Constructs a printer redirected to an output stream + * @param os the output. + * @param c covariates to print out. + */ + protected CsvPrinter(final OutputStream os, final Covariate ... c) { + covariates = c == null ? new Covariate[0] : c.clone(); + ps = new PrintStream(os); + printHeader(); + } + + /** + * Prints the header out. + *

+ * Should only be invoked at creation. + */ + protected void printHeader() { + RecalUtils.printHeader(ps); + } + + /** + * Prints out a report into the csv file. + * + * + * @param report the report to print out. + * @param mode the report associated mode. (typically ORIGINAL, RECALIBRATED + */ + public void print(final RecalibrationReport report, final String mode) { + RecalUtils.writeCSV(ps,report.getRecalibrationTables(),mode,covariates,false); + } + + /** + * Close the csv printer. + * + * No further output will be allowed or take place after calling this method. + */ + public void close() { + ps.close(); + } + + } + + /** + * Returns a csv output printer. + * + * @param out the output file. It will be overridden + * @param c list of covariates to print out. + * + * @throws FileNotFoundException if out could not be created anew. + * + * @return never null + */ + protected static CsvPrinter csvPrinter(final File out, final Covariate ... c) + throws FileNotFoundException + { + if (c == null) { + throw new IllegalArgumentException("the input covariate array cannot be null"); + } + return new CsvPrinter(out,c); + } + + /** + * Prints out a collection of reports into a file in Csv format in a way + * that can be used by R scripts (such as the plot generator script). + *

+ * The set of covariates is take as the minimum common set from all reports. + * + * @param out the output file. It will be overridden. + * @param reports map where keys are the unique 'mode' (ORIGINAL, RECALIBRATED, ...) + * of each report and the corresponding value the report itself. + * @throws FileNotFoundException if out could not be created anew. + */ + public static void generateCsv(final File out, final Map reports) + throws FileNotFoundException { + if (reports.size() == 0) { + writeCsv(out, reports, new Covariate[0]); + } else { + final Iterator rit = reports.values().iterator(); + final RecalibrationReport first = rit.next(); + final Covariate[] firstCovariates = first.getRequestedCovariates(); + final Set covariates = new LinkedHashSet<>(); + Utils.addAll(covariates,firstCovariates); + while (rit.hasNext() && covariates.size() > 0) { + final Covariate[] nextCovariates = rit.next().getRequestedCovariates(); + final Set nextCovariateNames = new LinkedHashSet(nextCovariates.length); + for (final Covariate nc : nextCovariates) { + nextCovariateNames.add(nc.getClass().getSimpleName()); + } + final Iterator cit = covariates.iterator(); + while (cit.hasNext()) { + if (!nextCovariateNames.contains(cit.next().getClass().getSimpleName())) { + cit.remove(); + } + } + } + writeCsv(out, reports, covariates.toArray(new Covariate[covariates.size()])); + } + } + + /** + * Print out a collection of reports into a file in Csv format in a way + * that can be used by R scripts (such as the plot generator script). + * + * @param out + * @param reports map where keys are the unique 'mode' (ORIGINAL, RECALIBRATED, ...) + * of each report and the corresponding value the report itself. + * @param c the covariates to print out. + * @throws FileNotFoundException if out could not be created anew. 
+ */ + private static void writeCsv(final File out, + final Map reports, final Covariate[] c) + throws FileNotFoundException { + final CsvPrinter p = csvPrinter(out,c); + for (Map.Entry e : reports.entrySet()) { + p.print(e.getValue(),e.getKey()); + } + p.close(); + } + public enum SOLID_RECAL_MODE { /** * Treat reference inserted bases as reference matching bases. Very unsafe! @@ -390,6 +532,24 @@ public class RecalUtils { report.print(outputFile); } + /** s + * Write recalibration plots into a file + * + * @param csvFile location of the intermediary file + * @param exampleReportFile where the report arguments are collected from. + * @param output result plot file name. + */ + public static void generatePlots(final File csvFile, final File exampleReportFile, final File output) { + final RScriptExecutor executor = new RScriptExecutor(); + executor.setExceptOnError(true); + executor.addScript(new Resource(SCRIPT_FILE, RecalUtils.class)); + executor.addArgs(csvFile.getAbsolutePath()); + executor.addArgs(exampleReportFile.getAbsolutePath()); + executor.addArgs(output.getAbsolutePath()); + Logger.getLogger(RecalUtils.class).debug("R command line: " + executor.getApproximateCommandLine()); + executor.exec(); + } + private static void outputRecalibrationPlot(final RecalibrationArgumentCollection RAC) { final RScriptExecutor executor = new RScriptExecutor(); @@ -452,18 +612,7 @@ public class RecalUtils { // output the csv file if (printHeader) { - final List header = new LinkedList(); - header.add("ReadGroup"); - header.add("CovariateValue"); - header.add("CovariateName"); - header.add("EventType"); - header.add("Observations"); - header.add("Errors"); - header.add("EmpiricalQuality"); - header.add("AverageReportedQuality"); - header.add("Accuracy"); - header.add("Recalibration"); - deltaTableFile.println(Utils.join(",", header)); + printHeader(deltaTableFile); } final Map covariateNameMap = new HashMap(requestedCovariates.length); @@ -480,6 +629,21 @@ public class 
RecalUtils { } } + private static void printHeader(PrintStream out) { + final List header = new LinkedList(); + header.add("ReadGroup"); + header.add("CovariateValue"); + header.add("CovariateName"); + header.add("EventType"); + header.add("Observations"); + header.add("Errors"); + header.add("EmpiricalQuality"); + header.add("AverageReportedQuality"); + header.add("Accuracy"); + header.add("Recalibration"); + out.println(Utils.join(",", header)); + } + /* * Return an initialized nested integer array with appropriate dimensions for use with the delta tables * diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java index ea45c2abf..ed9afa733 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java @@ -369,6 +369,11 @@ public class RecalibrationReport { return RAC; } + /** + * + * @deprecated use {@link #getRequestedCovariates()} instead. 
+ */ + @Deprecated public Covariate[] getCovariates() { return requestedCovariates; } diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java index 4fc9470f4..79ffa50a3 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java @@ -67,6 +67,8 @@ import java.util.ArrayList; public class ContextCovariate implements StandardCovariate { private final static Logger logger = Logger.getLogger(ContextCovariate.class); + + private int mismatchesContextSize; private int indelsContextSize; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java new file mode 100644 index 000000000..8c327efc0 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java @@ -0,0 +1,362 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ +package org.broadinstitute.sting.gatk.walkers.bqsr; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; +import java.util.*; + +import static org.testng.Assert.assertTrue; + +/** + * Tests Analyze Covariates. + *

+ * Notice that since PDF report generated by R are different every-time this program + * is executed their content won't be tested. It only will verify that file has a healthy size. + * + */ +public class AnalyzeCovariatesIntegrationTest extends WalkerTest { + + private static final String TOOL_NAME = AnalyzeCovariates.class.getSimpleName(); + + /** + * Directory where the testdata is located. + */ + private static final File TEST_DATA_DIR = new File(privateTestDir,"AnalyzeCovariates"); + + /** + * File containing the before report for normal testing. + */ + private static final File BEFORE_FILE = new File(TEST_DATA_DIR,"before.grp"); + + /** + * File containing the after report for normal testing. + */ + private static final File AFTER_FILE = new File(TEST_DATA_DIR,"after.grp"); + + + /** + * File containing the bqsr report for normal testing. + */ + private static final File BQSR_FILE = new File(TEST_DATA_DIR,"bqsr.grp"); + + /** + * Test the content of the generated csv file. + * + * @throws IOException should never happen. It would be an indicator of a + * problem with the testing environment. + */ + @Test(enabled = true) + public void testCsvGeneration() + throws IOException { + + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine("%s",null,true,true,true), + Collections.singletonList("106709d32e6f0a0a9dd6a6340ec246ab")); + executeTest("testCsvGeneration",spec); + } + + + /** + * Test the size of the generated pdf. + *

+ * Unfortunately we cannot test the content as it changes slightly + * every time the tool is run. + * + * @throws IOException should never happen. It would be an + * indicator of a problem with the testing environment. + */ + @Test(enabled = true) + public void testPdfGeneration() + throws IOException { + final File pdfFile = File.createTempFile("ACTest",".pdf"); + pdfFile.delete(); + pdfFile.deleteOnExit(); + + final List md5 = Collections.emptyList(); + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine(null,pdfFile.toString(),true,true,true),md5); + executeTest("testPdfGeneration",spec); + assertTrue(pdfFile.exists(),"the pdf file was not created"); + assertTrue(pdfFile.length() > 260000,"the pdf file size does" + + " not reach the minimum of 260Kb"); + } + + /** + * Test the effect of changing some recalibration parameters. + * @param afterFileName name of the alternative after recalibration file. + * @param description describes what has been changed. + * @throws IOException should never happen. It would be an + * indicator of a problem with the testing environment. + */ + @Test(enabled = true, dataProvider="alternativeAfterFileProvider") + public void testParameterChangeException(final String afterFileName, + final String description) + throws IOException { + + final File pdfFile = File.createTempFile("ACTest",".pdf"); + pdfFile.deleteOnExit(); + final List md5 = Collections.emptyList(); + final File afterFile = new File(TEST_DATA_DIR,afterFileName); + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine(null,"%s",true,true,afterFile), + 1,UserException.class); + executeTest("testParameterChangeException - " + description, spec); + } + + + /** + * Test combinations of input and output inclusion exclusion of the command + * line that cause an exception to be thrown. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. 
+ * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @throws IOException never thrown, unless there is a problem with the testing environment. + */ + @Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations") + public void testInOutAbsenceException(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile, + useBQSRFile,useBeforeFile,useAfterFile),0,UserException.class); + executeTest("testInOutAbsencePresenceException", spec); + } + + /** + * Test combinations of input and output inclusion exclusion of the + * command line that won't cause an exception. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @throws IOException never thrown, unless there is a problem with the testing environment. 
+ */ + @Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations") + public void testInOutAbsence(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + final List md5 = Collections.emptyList(); + final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile, + useBQSRFile,useBeforeFile,useAfterFile),md5); + executeTest("testInOutAbsencePresence", spec); + } + + + + @DataProvider + public Iterator alternativeInOutAbsenceCombinations(Method m) { + List result = new LinkedList(); + if (m.getName().endsWith("Exception")) { + result.add(new Object[] { false, false, true, true, true }); + result.add(new Object[] { true, true, false, false ,false}); + } + else { + result.add(new Object[] { true, true, true, false, false }); + result.add(new Object[] { true, true, false, true, false }); + result.add(new Object[] { true, true, false, false, true }); + result.add(new Object[] { true, false,false, true, false }); + result.add(new Object[] { false, true, true, false, false }); + + } + return result.iterator(); + } + + /** + * Provide recalibration parameter change data to relevant tests. + * @param m target test method. + * @return never null. 
+ */ + @DataProvider + public Iterator alternativeAfterFileProvider (Method m) { + final boolean expectsException = m.getName().endsWith("Exception"); + final List result = new LinkedList(); + for (final Object[] data : DIFFERENT_PARAMETERS_AFTER_FILES) { + if (data[1].equals(expectsException)) { + result.add(new Object[] { data[0], data[2] }); + } + } + return result.iterator(); + } + + /** + * Triplets < after-grp-file, whether it should fail, what is different > + */ + private final Object[][] DIFFERENT_PARAMETERS_AFTER_FILES = { + {"after-cov.grp", true, "Adds additional covariate: repeat-length"}, + {"after-dpSOLID.grp", true, "Change the default platform to SOLID"}, + {"after-noDp.grp",true, "Unset the default platform"}, + {"after-mcs4grp", true, "Changed -mcs parameter from 2 to 4"} + }; + + /** + * Build the AC command line given what combinations of input and output files should be included. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @return never null. + * @throws IOException never thrown, unless there is a problem with the testing environment. + */ + private String buildCommandLine(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + + final File csvFile = useCsvFile ? File.createTempFile("ACTest",".csv") : null; + final File pdfFile = usePdfFile ? File.createTempFile("ACTest",".pdf") : null; + + if (csvFile != null) { + csvFile.deleteOnExit(); + } + + if (pdfFile != null) { + pdfFile.deleteOnExit(); + } + + return buildCommandLine(csvFile == null ? null : csvFile.toString(), + pdfFile == null ? 
null : pdfFile.toString(), + useBQSRFile,useBeforeFile,useAfterFile); + } + + /** + * Build the AC command line given the output file names explicitly and what test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile, + final boolean useBeforeFile, final boolean useAfterFile) { + return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null, + useBeforeFile ? BEFORE_FILE : null, + useAfterFile ? AFTER_FILE : null); + } + + /** + * Build the AC command line given the output file names and the after file name explicitly and what other + * test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param afterFile the after input report file, null if none should be provided. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile, + final boolean useBeforeFile, final File afterFile) { + return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null, + useBeforeFile ? BEFORE_FILE : null, + afterFile); + } + + /** + * Build the AC command line given the output file names and the after file name explicitly and what other + * test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param bqsrFile the BQSR input report file, null if none should be provided. + * @param beforeFile the before input report file, null if non should be provided. + * @param afterFile the after input report file, null if none should be provided. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final File bqsrFile, + final File beforeFile, final File afterFile) { + + final List args = new LinkedList(); + args.add("-T"); + args.add(TOOL_NAME); + args.add("-R"); + args.add(hg19Reference); + args.add("-ignoreLMT"); + + if (csvFileName != null) { + args.add("-" + AnalyzeCovariates.CSV_ARG_SHORT_NAME); + args.add("'" + csvFileName + "'"); + } + if (pdfFileName != null) { + args.add("-" + AnalyzeCovariates.PDF_ARG_SHORT_NAME); + args.add("'" + pdfFileName + "'"); + } + if (bqsrFile != null) { + args.add("-BQSR"); + args.add("'" + bqsrFile.getAbsoluteFile().toString() + "'"); + } + if (beforeFile != null) { + args.add("-" + AnalyzeCovariates.BEFORE_ARG_SHORT_NAME); + args.add("'" + beforeFile.getAbsolutePath().toString() + "'"); + } + if (afterFile != null) { + args.add("-" + AnalyzeCovariates.AFTER_ARG_SHORT_NAME); + args.add("'" + afterFile.getAbsolutePath().toString() + "'"); + } + return Utils.join(" ", args); + + } +} diff --git a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R index 8a9eecf48..bc53e29dc 100644 --- a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R +++ b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R @@ -12,7 +12,27 @@ if ( interactive() ) { args <- commandArgs(TRUE) } data <- read.csv(args[1]) + +data$Recalibration = as.factor(sapply(as.character(data$Recalibration),function(x) { + xu = 
toupper(x); + if (xu == "ORIGINAL") "BEFORE" else + if (xu == "RECALIBRATED") "AFTER" else + if (xu == "RECALIBRATION") "BQSR" else + xu })); + gsa.report <- gsa.read.gatkreport(args[2]) + +gsa.report$Arguments$Value = as.character(gsa.report$Arguments$Value); +gsa.report$Arguments = subset(gsa.report$Arguments,subset= Argument != "plot_pdf_file"); +if (length(levels(data$Recalibration)) > 1) { + gsa.report$Arguments = subset(gsa.report$Arguments,subset= Argument != "recalibration_report"); +} +gsa.report$Arguments$Value[gsa.report$Argument$Value == "null"] = "None"; + +gsa.report.covariate.argnum = gsa.report$Arguments$Argument == "covariate"; +gsa.report$Arguments$Value[gsa.report.covariate.argnum] = sapply(strsplit(gsa.report$Arguments$Value[gsa.report.covariate.argnum],","),function(x) { + y = sub("(^.+)Covariate","\\1",x); paste(y,collapse=",") } ); + data <- within(data, EventType <- factor(EventType, levels = rev(levels(EventType)))) numRG = length(unique(data$ReadGroup)) @@ -54,31 +74,31 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn d=rbind(dSub, dIns, dDel) if( cov != "QualityScore" ) { - p <- ggplot(d, aes(x=CovariateValue,y=Accuracy,alpha=log10(Observations))) + + p <- ggplot(d, aes(x=CovariateValue,y=Accuracy,alpha=log10(Observations))) + ylim(min(-10,d$Accuracy),max(10,d$Accuracy)) + geom_abline(intercept=0, slope=0, linetype=2) + xlab(paste(cov,"Covariate")) + ylab("Quality Score Accuracy") + blankTheme if(cov == "Cycle") { - b <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + b <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate")) + - ylab("Mean Quality Score") + + ylab("Mean Quality 
Score") + ylim(0,max(42,d$AverageReportedQuality)); blankTheme - e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) } else { - c <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + c <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) + xlab(paste(cov,"Covariate (3 base suffix)")) p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate (3 base suffix)")) + ylab("Mean Quality Score") + blankTheme - f <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + f <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) } @@ -88,14 +108,14 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn xlab("Reported Quality Score") + ylab("Empirical Quality Score") + blankTheme - a <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + a <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) p <- ggplot(d, aes(x=CovariateValue)) + xlab(paste(cov,"Covariate")) + ylab("No. 
of Observations (area normalized)") + blankTheme d <- p + geom_histogram(aes(fill=Recalibration,weight=Observations,y=..ndensity..),alpha=0.6,binwidth=1,position="identity") - d <- d + scale_fill_manual(values=c("maroon1","blue")) + d <- d + scale_fill_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) d <- d + facet_grid(.~EventType) # d <- d + scale_y_continuous(formatter="comma") } diff --git a/public/java/src/org/broadinstitute/sting/utils/Utils.java b/public/java/src/org/broadinstitute/sting/utils/Utils.java index 73a538ee5..75bd6a3d1 100644 --- a/public/java/src/org/broadinstitute/sting/utils/Utils.java +++ b/public/java/src/org/broadinstitute/sting/utils/Utils.java @@ -683,6 +683,36 @@ public class Utils { return denom == 0 ? "NA" : String.format("%.2f", num / (1.0 * denom)); } + /** + * Adds element from an array into a collection. + * + * In the event of exception being throw due to some element, dest might have been modified by + * the successful addition of element before that one. + * + * @param dest the destination collection which cannot be null and should be able to accept + * the input elements. + * @param elements the element to add to dest + * @param collection type element. + * @throws UnsupportedOperationException if the add operation + * is not supported by dest. + * @throws ClassCastException if the class of any of the elements + * prevents it from being added to dest. + * @throws NullPointerException if any of the elements is null and dest + * does not permit null elements + * @throws IllegalArgumentException if some property of any of the elements + * prevents it from being added to this collection + * @throws IllegalStateException if any of the elements cannot be added at this + * time due to insertion restrictions. + * @return true if the collection was modified as a result. + */ + public static boolean addAll(Collection dest, T ... 
elements) { + boolean result = false; + for (final T e : elements) { + result = dest.add(e) | result; + } + return result; + } + /** * Create a constant map that maps each value in values to itself */ From 1f8282633beed08027da4aa4525464041b309c83 Mon Sep 17 00:00:00 2001 From: Valentin Ruano-Rubio Date: Wed, 19 Jun 2013 11:44:18 -0400 Subject: [PATCH 80/99] Removed plots generation from the BaseRecalibration software Improved AnalyzeCovariates (AC) integration test. Renamed AC test files ending with .grp to .table Implementation: * Removed RECAL_PDF/CSV_FILE from RecalibrationArgumentCollection (RAC). Updated rest of the code accordingly. * Fixed BQSRIntegrationTest to work with new changes --- .../gatk/walkers/bqsr/AnalyzeCovariates.java | 2 +- .../sting/gatk/walkers/bqsr/BQSRGatherer.java | 12 ----- .../gatk/walkers/bqsr/BaseRecalibrator.java | 20 -------- .../bqsr/RecalibrationArgumentCollection.java | 17 ------- .../sting/utils/recalibration/RecalUtils.java | 38 ++++++++++----- .../recalibration/RecalibrationReport.java | 3 -- .../AnalyzeCovariatesIntegrationTest.java | 16 +++---- .../walkers/bqsr/BQSRIntegrationTest.java | 48 +++++++------------ .../sting/utils/recalibration/BQSR.R | 2 +- .../sting/utils/exceptions/UserException.java | 6 +++ 10 files changed, 57 insertions(+), 107 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java index b6f911753..7a7527dd1 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java @@ -432,7 +432,7 @@ public final class AnalyzeCovariates extends RodWalker diffs = exampleEntry.getValue().getRAC().compareReportArguments( reportEntries[i].getValue().getRAC(),exampleEntry.getKey(),reportEntries[i].getKey()); if (diffs.size() != 0) { - throw new 
UserException("There are differences in relevant arguments of" + throw new UserException.IncompatibleRecalibrationTableParameters("There are differences in relevant arguments of" + " two or more input recalibration reports. Please make sure" + " they have been created using the same recalibration parameters." + " " + Utils.join("// ", reportDifferencesStringArray(diffs))); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java index 7727c2dac..d6f0e16e8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java @@ -92,18 +92,6 @@ public class BQSRGatherer extends Gatherer { generalReport.calculateQuantizedQualities(); - RecalibrationArgumentCollection RAC = generalReport.getRAC(); - if ( RAC.RECAL_PDF_FILE != null ) { - RAC.RECAL_TABLE_FILE = output; - if ( RAC.existingRecalibrationReport != null ) { - final RecalibrationReport originalReport = new RecalibrationReport(RAC.existingRecalibrationReport); - RecalUtils.generateRecalibrationPlot(RAC, originalReport.getRecalibrationTables(), generalReport.getRecalibrationTables(), generalReport.getRequestedCovariates()); - } - else { - RecalUtils.generateRecalibrationPlot(RAC, generalReport.getRecalibrationTables(), generalReport.getRequestedCovariates()); - } - } - generalReport.output(outputFile); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java index 41d3f3991..3882b70fa 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java @@ -180,11 +180,6 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche 
public void initialize() { baq = new BAQ(BAQGOP); // setup the BAQ object with the provided gap open penalty - if (RAC.RECAL_PDF_FILE != null) { - Utils.warnUser("This is not the recommended way to generate recalibration plots any longer and will be" - + " discontinued soon in future releases. Please use the 'AnalyzeCovariates' tool instead from now one"); - } - if (RAC.FORCE_PLATFORM != null) RAC.DEFAULT_PLATFORM = RAC.FORCE_PLATFORM; @@ -522,11 +517,6 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche generateReport(); logger.info("...done!"); - if ( RAC.RECAL_PDF_FILE != null ) { - logger.info("Generating recalibration plots..."); - generatePlots(); - } - logger.info("BaseRecalibrator was able to recalibrate " + result + " reads"); } @@ -534,16 +524,6 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche return recalibrationEngine.getFinalRecalibrationTables(); } - private void generatePlots() { - File recalFile = getToolkit().getArguments().BQSR_RECAL_FILE; - if (recalFile != null) { - RecalibrationReport report = new RecalibrationReport(recalFile); - RecalUtils.generateRecalibrationPlot(RAC, report.getRecalibrationTables(), getRecalibrationTable(), requestedCovariates); - } - else - RecalUtils.generateRecalibrationPlot(RAC, getRecalibrationTable(), requestedCovariates); - } - /** * go through the quality score table and use the # observations and the empirical quality score * to build a quality score histogram for quantization. 
Then use the QuantizeQual algorithm to diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java index c1ecb2320..b9f16132c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java @@ -89,21 +89,6 @@ public class RecalibrationArgumentCollection implements Cloneable { public File RECAL_TABLE_FILE = null; public PrintStream RECAL_TABLE; - /** - * If not provided, then no plots will be generated (useful for queue scatter/gathering). - * However, we *highly* recommend that users generate these plots whenever possible for QC checking. - */ - @Output(fullName = "plot_pdf_file", shortName = "plots", doc = "The output recalibration pdf file to create", required = false, defaultToStdout = false) - public File RECAL_PDF_FILE = null; - - /** - * If not provided, then a temporary file is created and then deleted upon completion. - * For advanced users only. - */ - @Advanced - @Argument(fullName = "intermediate_csv_file", shortName = "intermediate", doc = "The intermediate csv file to create", required = false) - public File RECAL_CSV_FILE = null; - /** * Note that the --list argument requires a fully resolved and correct command-line to work. */ @@ -284,8 +269,6 @@ public class RecalibrationArgumentCollection implements Cloneable { argumentsTable.set("quantizing_levels", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, QUANTIZING_LEVELS); argumentsTable.addRowID("recalibration_report", true); argumentsTable.set("recalibration_report", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, existingRecalibrationReport == null ? 
"null" : existingRecalibrationReport.getAbsolutePath()); - argumentsTable.addRowID("plot_pdf_file", true); - argumentsTable.set("plot_pdf_file", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, RECAL_PDF_FILE == null ? "null" : RECAL_PDF_FILE.getAbsolutePath()); argumentsTable.addRowID("binary_tag_name", true); argumentsTable.set("binary_tag_name", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, BINARY_TAG_NAME == null ? "null" : BINARY_TAG_NAME); return argumentsTable; diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java index 8908ce4a4..56f7e8257 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java @@ -550,36 +550,48 @@ public class RecalUtils { executor.exec(); } - private static void outputRecalibrationPlot(final RecalibrationArgumentCollection RAC) { + private static void outputRecalibrationPlot(final File csvFile, final RecalibrationArgumentCollection RAC) { final RScriptExecutor executor = new RScriptExecutor(); executor.addScript(new Resource(SCRIPT_FILE, RecalUtils.class)); - executor.addArgs(RAC.RECAL_CSV_FILE.getAbsolutePath()); + executor.addArgs(csvFile.getAbsolutePath()); executor.addArgs(RAC.RECAL_TABLE_FILE.getAbsolutePath()); - executor.addArgs(RAC.RECAL_PDF_FILE.getAbsolutePath()); executor.exec(); } + /** + * Please use {@link #generateCsv(java.io.File, java.util.Map)} and {@link #generatePlots(java.io.File, java.io.File, java.io.File)} instead. 
+ * + * @deprecated + */ + @Deprecated public static void generateRecalibrationPlot(final RecalibrationArgumentCollection RAC, final RecalibrationTables original, final Covariate[] requestedCovariates) { generateRecalibrationPlot(RAC, original, null, requestedCovariates); } + /** + * Please use {@link #generateCsv(java.io.File, java.util.Map)} and {@link #generatePlots(java.io.File, java.io.File, java.io.File)} instead. + * + * @deprecated + */ + @Deprecated public static void generateRecalibrationPlot(final RecalibrationArgumentCollection RAC, final RecalibrationTables original, final RecalibrationTables recalibrated, final Covariate[] requestedCovariates) { - final PrintStream csvFile; + final PrintStream csvStream; + final File csvTempFile = null; try { - if ( RAC.RECAL_CSV_FILE == null ) { - RAC.RECAL_CSV_FILE = File.createTempFile("BQSR", ".csv"); - RAC.RECAL_CSV_FILE.deleteOnExit(); - } - csvFile = new PrintStream(RAC.RECAL_CSV_FILE); + File csvTmpFile = File.createTempFile("BQSR",".csv"); + csvTmpFile.deleteOnExit(); + csvStream = new PrintStream(csvTmpFile); } catch (IOException e) { - throw new UserException.CouldNotCreateOutputFile(RAC.RECAL_CSV_FILE, e); + throw new UserException("Could not create temporary csv file", e); } if ( recalibrated != null ) - writeCSV(csvFile, recalibrated, "RECALIBRATED", requestedCovariates, true); - writeCSV(csvFile, original, "ORIGINAL", requestedCovariates, recalibrated == null); - outputRecalibrationPlot(RAC); + writeCSV(csvStream, recalibrated, "RECALIBRATED", requestedCovariates, true); + writeCSV(csvStream, original, "ORIGINAL", requestedCovariates, recalibrated == null); + csvStream.close(); + outputRecalibrationPlot(csvTempFile, RAC); + csvTempFile.delete(); } private static void writeCSV(final PrintStream deltaTableFile, final RecalibrationTables recalibrationTables, final String recalibrationMode, final Covariate[] requestedCovariates, final boolean printHeader) { diff --git 
a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java index ed9afa733..091b5ecf0 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java @@ -340,9 +340,6 @@ public class RecalibrationReport { else if (argument.equals("recalibration_report")) RAC.existingRecalibrationReport = (value == null) ? null : new File((String) value); - else if (argument.equals("plot_pdf_file")) - RAC.RECAL_PDF_FILE = (value == null) ? null : new File((String) value); - else if (argument.equals("binary_tag_name")) RAC.BINARY_TAG_NAME = (value == null) ? null : (String) value; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java index 8c327efc0..95ce80848 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java @@ -77,18 +77,18 @@ public class AnalyzeCovariatesIntegrationTest extends WalkerTest { /** * File containing the before report for normal testing. */ - private static final File BEFORE_FILE = new File(TEST_DATA_DIR,"before.grp"); + private static final File BEFORE_FILE = new File(TEST_DATA_DIR,"before.table"); /** * File containing the after report for normal testing. */ - private static final File AFTER_FILE = new File(TEST_DATA_DIR,"after.grp"); + private static final File AFTER_FILE = new File(TEST_DATA_DIR,"after.table"); /** * File containing the bqsr report for normal testing. 
*/ - private static final File BQSR_FILE = new File(TEST_DATA_DIR,"bqsr.grp"); + private static final File BQSR_FILE = new File(TEST_DATA_DIR,"bqsr.table"); /** * Test the content of the generated csv file. @@ -150,7 +150,7 @@ public class AnalyzeCovariatesIntegrationTest extends WalkerTest { final File afterFile = new File(TEST_DATA_DIR,afterFileName); final WalkerTestSpec spec = new WalkerTestSpec( buildCommandLine(null,"%s",true,true,afterFile), - 1,UserException.class); + 1,UserException.IncompatibleRecalibrationTableParameters.class); executeTest("testParameterChangeException - " + description, spec); } @@ -237,10 +237,10 @@ public class AnalyzeCovariatesIntegrationTest extends WalkerTest { * Triplets < alfter-grp-file, whether it should fail, what is different > */ private final Object[][] DIFFERENT_PARAMETERS_AFTER_FILES = { - {"after-cov.grp", true, "Adds additional covaraite: repeat-length"}, - {"after-dpSOLID.grp", true, "Change the default platform to SOLID"}, - {"after-noDp.grp",true, "Unset the default platform"}, - {"after-mcs4grp", true, "Changed -mcs parameter from 2 to 4"} + {"after-cov.table", true, "Adds additional covariate: repeat-length" }, + {"after-dpSOLID.table", true, "Change the default platform to SOLID" }, + {"after-noDp.table",true, "Unset the default platform" }, + {"after-mcs4.table", true, "Changed -mcs parameter from 2 to 4" } }; /** diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java index 71c29fe0b..05183a521 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java @@ -100,23 +100,23 @@ public class BQSRIntegrationTest extends WalkerTest { @DataProvider(name = "BQSRTest") public Object[][] createBQSRTestData() { return new Object[][]{ - {new 
BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, "", "61fd466b5e94d2d67e116f6f67c9f939")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov ContextCovariate", "e08b5bcdb64f4beea03730e5631a14ca")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov CycleCovariate", "448a45dc154c95d1387cb5cdddb67071")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --indels_context_size 4", "c1e7999e445d51bbe2e775dac5325643")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --low_quality_tail 5", "a57c16918cdfe12d55a89c21bf195279")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --quantizing_levels 6", "836dccacf48ccda6b2843d07e8f1ef4d")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --mismatches_context_size 4", "0fb2aedc2f8d66b5821cb570f15a8c4d")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", "", "c9953f020a65c1603a6d71aeeb1b95f3")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "85a120b7d86b61597b86b9e93decbdfc")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "5248dc49aec0323c74b496bb4928c73c")}, - {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", "cb52f267e0010f849f50b0bf1de474a1")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "fb372d0a8fc41b01ced1adab31546850")}, - {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "c1c3cda8caceed619d3d439c3990cd26")}, - {new BQSRTest(b36KGReference, validationDataLocation + 
"NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "c9953f020a65c1603a6d71aeeb1b95f3")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "5bfff0c699345cca12a9b33acf95588f")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, "", "f805a0020eea987b79f314fa99913806")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov ContextCovariate", "86075d3856eb06816a0dd81af55e421f")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov CycleCovariate", "155802237e1fc7a001398b8f4bcf4b72")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --indels_context_size 4", "38c7916cc019fe8d134df67639422b42")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --low_quality_tail 5", "b74e75f3c5aa90bd21af1e20f2ac8c40")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --quantizing_levels 6", "e564505aea11464de8ed72890d9ea89a")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --mismatches_context_size 4", "380d8be121ffaddd3461ee0ac3d1a76f")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", "", "0b5a8e259e997e4c7b5836d4c28e6f4d")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "281682124584ab384f23359934df0c3b")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "0a92fdff5fd26227c29d34eda5a32f49")}, + {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", "90d8c24077e8ae9a0037a9aad5f09e31")}, + {new BQSRTest(b36KGReference, 
validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "c41ef02c640ef1fed4bfc03b9b33b616")}, + {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "b577cd1d529425f66db49620db09fdca")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "0b5a8e259e997e4c7b5836d4c28e6f4d")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "9ad49269c0156f8ab1173261bf23e600")}, // make sure we work with ION torrent bam - {new BQSRTest(b37KGReference, privateTestDir + "iontorrent.bam", "20:10,000,000-10,200,000", "", "7375c7b692e76b651c278a9fb478fa1c")}, + {new BQSRTest(b37KGReference, privateTestDir + "iontorrent.bam", "20:10,000,000-10,200,000", "", "04bfa4760767022e7f5252e6e4432cc1")}, }; } @@ -141,22 +141,6 @@ public class BQSRIntegrationTest extends WalkerTest { executeTest("testBQSRFailWithoutDBSNP", spec); } - @Test - public void testBQSRCSV() { - WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( - " -T BaseRecalibrator" + - " -R " + b36KGReference + - " -I " + validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.bam" + - " -knownSites " + b36dbSNP129 + - " -L 1:10,000,000-10,200,000" + - " -o /dev/null" + - " -sortAllCols" + - " --plot_pdf_file /dev/null" + - " --intermediate_csv_file %s", - Arrays.asList("90ad19143024684e3c4410dc8fd2bd9d")); - executeTest("testBQSR-CSVfile", spec); - } - @Test public void testBQSRFailWithSolidNoCall() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( diff --git 
a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R index bc53e29dc..b0055dd10 100644 --- a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R +++ b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R @@ -85,7 +85,7 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate")) + - ylab("Mean Quality Score") + ylim(0,max(42,d$AverageReportedQuality)); + ylab("Mean Quality Score") + ylim(0,max(42,d$AverageReportedQuality)) + blankTheme e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) diff --git a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java index 0e95fd158..6126116c2 100644 --- a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java +++ b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java @@ -471,4 +471,10 @@ public class UserException extends ReviewedStingException { super(message,innerException); } } + + public static class IncompatibleRecalibrationTableParameters extends UserException { + public IncompatibleRecalibrationTableParameters(String s) { + super(s); + } + } } From 0672ac50322ccc57fee96ebda3e50480404a665d Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 19 Jun 2013 19:42:09 -0400 Subject: [PATCH 81/99] Fix public / protected dependency --- ...rotectedEngineFeaturesIntegrationTest.java | 93 +++++++++++++++++++ .../gatk/EngineFeaturesIntegrationTest.java | 34 ------- 2 files changed, 93 insertions(+), 34 deletions(-) create mode 100644 
protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java diff --git a/protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java new file mode 100644 index 000000000..680706802 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java @@ -0,0 +1,93 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. 
+* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. 
+* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk; + +import net.sf.samtools.util.BlockCompressedInputStream; +import org.broad.tribble.readers.AsciiLineReader; +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.gatk.io.stubs.VariantContextWriterStub; +import org.broadinstitute.variant.vcf.VCFCodec; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.FileInputStream; +import java.util.Arrays; + +/** + * + */ +public class ProtectedEngineFeaturesIntegrationTest extends WalkerTest { + @Test(enabled = true) + public void testGATKVersionInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" + + " -o %s -L 20:10,000,000", + 1, Arrays.asList("")); + final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + final VCFHeaderLine versionLine = header.getMetaDataLine(VariantContextWriterStub.GATK_VERSION_KEY); + Assert.assertNotNull(versionLine); + Assert.assertEquals(versionLine.getValue(), CommandLineGATK.getVersionNumber()); + } + + @Test(enabled = true) + public void testCompressedVCFOutputWithNT() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" + + " -o %s -L 20:10,000,000-10,100,000 -nt 4", + 1, Arrays.asList("vcf.gz"), Arrays.asList("")); + final File vcf = executeTest("testCompressedVCFOutputWithNT", spec).first.get(0); + final AsciiLineReader reader = new AsciiLineReader(new BlockCompressedInputStream(vcf)); + int nLines = 0; + while ( reader.readLine() != null ) + nLines++; + Assert.assertTrue(nLines > 0); + } +} \ No 
newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index c97ab7301..541fb78c0 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -27,13 +27,10 @@ package org.broadinstitute.sting.gatk; import net.sf.samtools.SAMFileReader; import net.sf.samtools.SAMRecord; -import net.sf.samtools.util.BlockCompressedInputStream; -import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter; -import org.broadinstitute.sting.gatk.io.stubs.VariantContextWriterStub; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.ReadFilters; import org.broadinstitute.sting.gatk.walkers.ReadWalker; @@ -42,15 +39,11 @@ import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; -import org.broadinstitute.variant.vcf.VCFCodec; -import org.broadinstitute.variant.vcf.VCFHeader; -import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; -import java.io.FileInputStream; import java.io.PrintStream; import java.util.Arrays; @@ -204,33 +197,6 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { executeTest("badCompress " + compress, spec); } - @Test(enabled = true) - public void testGATKVersionInVCF() throws Exception { 
- WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " - + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" - + " -o %s -L 20:10,000,000", - 1, Arrays.asList("")); - final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); - final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); - final VCFHeaderLine versionLine = header.getMetaDataLine(VariantContextWriterStub.GATK_VERSION_KEY); - Assert.assertNotNull(versionLine); - Assert.assertEquals(versionLine.getValue(), CommandLineGATK.getVersionNumber()); - } - - @Test(enabled = true) - public void testCompressedVCFOutputWithNT() throws Exception { - WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " - + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" - + " -o %s -L 20:10,000,000-10,100,000 -nt 4", - 1, Arrays.asList("vcf.gz"), Arrays.asList("")); - final File vcf = executeTest("testCompressedVCFOutputWithNT", spec).first.get(0); - final AsciiLineReader reader = new AsciiLineReader(new BlockCompressedInputStream(vcf)); - int nLines = 0; - while ( reader.readLine() != null ) - nLines++; - Assert.assertTrue(nLines > 0); - } - // -------------------------------------------------------------------------------- // // Test that defaultBaseQualities actually works From fdfe4e41d5d8c92fad74f56e654992f3a97ab602 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 20 Jun 2013 11:19:13 -0400 Subject: [PATCH 82/99] Better GATK version and command line output -- Previous version emitted command lines that look like: ##HaplotypeCaller="analysis_type=HaplotypeCaller input_file=[private/testdata/reduced.readNotFullySpanningDeletion.bam] ..." 
the new version provides additional information on when the GATK was run and the GATK version in a nicer format: ##GATKCommandLine= -- Additionally, the command line options are emitted sequentially in the file, so you can see a running record of how a VCF was produced, such as this example from the integration test: ##GATKCommandLine= ##GATKCommandLine= -- Removed the ProtectedEngineFeaturesIntegrationTest -- Actual unit tests for these features! --- ...rotectedEngineFeaturesIntegrationTest.java | 93 ------------------- .../UnifiedGenotyperIntegrationTest.java | 18 ++++ .../sting/gatk/GenomeAnalysisEngine.java | 8 ++ .../io/stubs/VariantContextWriterStub.java | 36 +------ .../sting/utils/variant/GATKVCFUtils.java | 27 +++++- .../gatk/EngineFeaturesIntegrationTest.java | 54 +++++++++++ .../utils/variant/GATKVCFUtilsUnitTest.java | 86 +++++++++++++++++ 7 files changed, 197 insertions(+), 125 deletions(-) delete mode 100644 protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java create mode 100644 public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java diff --git a/protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java deleted file mode 100644 index 680706802..000000000 --- a/protected/java/test/org/broadinstitute/sting/gatk/ProtectedEngineFeaturesIntegrationTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
-* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. 
INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. -* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. -*/ - -package org.broadinstitute.sting.gatk; - -import net.sf.samtools.util.BlockCompressedInputStream; -import org.broad.tribble.readers.AsciiLineReader; -import org.broadinstitute.sting.WalkerTest; -import org.broadinstitute.sting.gatk.io.stubs.VariantContextWriterStub; -import org.broadinstitute.variant.vcf.VCFCodec; -import org.broadinstitute.variant.vcf.VCFHeader; -import org.broadinstitute.variant.vcf.VCFHeaderLine; -import org.testng.Assert; -import org.testng.annotations.Test; - -import java.io.File; -import java.io.FileInputStream; -import java.util.Arrays; - -/** - * - */ -public class ProtectedEngineFeaturesIntegrationTest extends WalkerTest { - @Test(enabled = true) - public void testGATKVersionInVCF() throws Exception { - WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " - + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" - + " -o %s -L 20:10,000,000", - 1, Arrays.asList("")); - final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); - final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); - final VCFHeaderLine versionLine = header.getMetaDataLine(VariantContextWriterStub.GATK_VERSION_KEY); - Assert.assertNotNull(versionLine); - 
Assert.assertEquals(versionLine.getValue(), CommandLineGATK.getVersionNumber()); - } - - @Test(enabled = true) - public void testCompressedVCFOutputWithNT() throws Exception { - WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " - + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" - + " -o %s -L 20:10,000,000-10,100,000 -nt 4", - 1, Arrays.asList("vcf.gz"), Arrays.asList("")); - final File vcf = executeTest("testCompressedVCFOutputWithNT", spec).first.get(0); - final AsciiLineReader reader = new AsciiLineReader(new BlockCompressedInputStream(vcf)); - int nLines = 0; - while ( reader.readLine() != null ) - nLines++; - Assert.assertTrue(nLines > 0); - } -} \ No newline at end of file diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java index 3eb9b4e1c..532982853 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java @@ -46,11 +46,15 @@ package org.broadinstitute.sting.gatk.walkers.genotyper; +import net.sf.samtools.util.BlockCompressedInputStream; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.Assert; import org.testng.annotations.Test; +import java.io.File; import java.util.Arrays; import java.util.Collections; @@ -302,4 +306,18 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { executeTest("test calling on reads with Ns in CIGAR", spec); } + + @Test(enabled = true) + public void testCompressedVCFOutputWithNT() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T 
UnifiedGenotyper -R " + b37KGReference + " -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" + + " -o %s -L 20:10,000,000-10,100,000 -nt 4", + 1, Arrays.asList("vcf.gz"), Arrays.asList("")); + final File vcf = executeTest("testCompressedVCFOutputWithNT", spec).first.get(0); + final AsciiLineReader reader = new AsciiLineReader(new BlockCompressedInputStream(vcf)); + int nLines = 0; + while ( reader.readLine() != null ) + nLines++; + Assert.assertTrue(nLines > 0); + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index 6fa1b741c..c4f1a286d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -170,6 +170,14 @@ public class GenomeAnalysisEngine { this.walker = walker; } + /** + * The short name of the current GATK walker as a string + * @return a non-null String + */ + public String getWalkerName() { + return getWalkerName(walker.getClass()); + } + /** * A processed collection of SAM reader identifiers. 
*/ diff --git a/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java b/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java index 8b7c4282b..3e3d6de41 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java +++ b/public/java/src/org/broadinstitute/sting/gatk/io/stubs/VariantContextWriterStub.java @@ -26,18 +26,15 @@ package org.broadinstitute.sting.gatk.io.stubs; import net.sf.samtools.SAMSequenceDictionary; -import org.broadinstitute.sting.gatk.CommandLineExecutable; -import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.io.OutputTracker; -import org.broadinstitute.sting.utils.classloader.JVMUtils; import org.broadinstitute.sting.utils.variant.GATKVCFUtils; -import org.broadinstitute.variant.vcf.VCFHeader; -import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.variantcontext.writer.Options; import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter; import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; import java.io.File; import java.io.OutputStream; @@ -54,7 +51,6 @@ import java.util.List; * @version 0.1 */ public class VariantContextWriterStub implements Stub, VariantContextWriter { - public final static String GATK_VERSION_KEY = "GATKVersion"; public final static boolean UPDATE_CONTIG_HEADERS = true; /** @@ -227,14 +223,9 @@ public class VariantContextWriterStub implements Stub, Var if ( header.isWriteEngineHeaders() ) { // skip writing the command line header if requested if ( ! 
skipWritingCommandLineHeader && header.isWriteCommandLine() ) { - // write the GATK version if we have command line information enabled - vcfHeader.addMetaDataLine(getGATKVersionHeaderLine()); - - // Check for the command-line argument header line. If not present, add it in. - final VCFHeaderLine commandLineArgHeaderLine = getCommandLineArgumentHeaderLine(); - final boolean foundCommandLineHeaderLine = vcfHeader.getMetaDataLine(commandLineArgHeaderLine.getKey()) != null; - if ( ! foundCommandLineHeaderLine ) - vcfHeader.addMetaDataLine(commandLineArgHeaderLine); + // Always add the header line, as the current format allows multiple entries + final VCFHeaderLine commandLineArgHeaderLine = GATKVCFUtils.getCommandLineArgumentHeaderLine(engine, argumentSources); + vcfHeader.addMetaDataLine(commandLineArgHeaderLine); } if ( UPDATE_CONTIG_HEADERS ) @@ -280,21 +271,4 @@ public class VariantContextWriterStub implements Stub, Var getOutputFile() != null && // that are going to disk engine.getArguments().generateShadowBCF; // and we actually want to do it } - - /** - * Gets the appropriately formatted header for a VCF file - * @return VCF file header. - */ - private VCFHeaderLine getCommandLineArgumentHeaderLine() { - CommandLineExecutable executable = JVMUtils.getObjectOfType(argumentSources,CommandLineExecutable.class); - return new VCFHeaderLine(executable.getAnalysisName(), "\"" + engine.createApproximateCommandLineArgumentString(argumentSources.toArray()) + "\""); - } - - /** - * Gets the GATK version header line for the VCF file - * @return non-null VCFHeaderLine. 
- */ - private VCFHeaderLine getGATKVersionHeaderLine() { - return new VCFHeaderLine(GATK_VERSION_KEY, CommandLineGATK.getVersionNumber()); - } } diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java index aa2e92559..09db585a6 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java @@ -30,10 +30,10 @@ import org.broad.tribble.FeatureCodec; import org.broad.tribble.FeatureCodecHeader; import org.broad.tribble.readers.PositionalBufferedStream; import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.utils.collections.Pair; -import org.broadinstitute.variant.bcf2.BCF2Codec; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.vcf.*; @@ -52,6 +52,31 @@ public class GATKVCFUtils { */ private GATKVCFUtils() { } + public final static String GATK_COMMAND_LINE_KEY = "GATKCommandLine"; + + /** + * Gets the appropriately formatted header for a VCF file describing this GATK run + * + * @param engine the GATK engine that holds the walker name, GATK version, and other information + * @param argumentSources contains information on the argument values provided to the GATK for converting to a + * command line string. Should be provided from the data in the parsing engine. Can be + * empty in which case the command line will be the empty string. + * @return VCF header line describing this run of the GATK. 
+ */ + public static VCFHeaderLine getCommandLineArgumentHeaderLine(final GenomeAnalysisEngine engine, final Collection argumentSources) { + if ( engine == null ) throw new IllegalArgumentException("engine cannot be null"); + if ( argumentSources == null ) throw new IllegalArgumentException("argumentSources cannot be null"); + + final Map attributes = new LinkedHashMap<>(); + attributes.put("ID", engine.getWalkerName()); + attributes.put("Version", CommandLineGATK.getVersionNumber()); + final Date date = new Date(); + attributes.put("Date", date.toString()); + attributes.put("Epoch", Long.toString(date.getTime())); + attributes.put("CommandLineOptions", engine.createApproximateCommandLineArgumentString(argumentSources.toArray())); + return new VCFSimpleHeaderLine(GATK_COMMAND_LINE_KEY, attributes, Collections.emptyList()); + } + public static Map getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, List> rodBindings) { // Collect the eval rod names final Set names = new TreeSet(); diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index 541fb78c0..aca6cf984 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk; import net.sf.samtools.SAMFileReader; import net.sf.samtools.SAMRecord; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; @@ -39,11 +40,16 @@ import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import 
org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; +import org.broadinstitute.sting.utils.variant.GATKVCFUtils; +import org.broadinstitute.variant.vcf.VCFCodec; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; +import java.io.FileInputStream; import java.io.PrintStream; import java.util.Arrays; @@ -197,6 +203,54 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { executeTest("badCompress " + compress, spec); } + // -------------------------------------------------------------------------------- + // + // Test that the VCF version key is what we expect + // + // -------------------------------------------------------------------------------- + @Test(enabled = true) + public void testGATKVersionInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference + + " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf" + + " -o %s -L 20:61098", + 1, Arrays.asList("")); + spec.disableShadowBCF(); + final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + final VCFHeaderLine versionLine = header.getMetaDataLine(GATKVCFUtils.GATK_COMMAND_LINE_KEY); + Assert.assertNotNull(versionLine); + Assert.assertTrue(versionLine.toString().contains("SelectVariants")); + } + + @Test(enabled = true) + public void testMultipleGATKVersionsInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference + + " -V " + privateTestDir + "gatkCommandLineInHeader.vcf" + + " -o %s", + 1, Arrays.asList("")); + spec.disableShadowBCF(); + final File vcf = executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new 
VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + + boolean foundHC = false; + boolean foundSV = false; + for ( final VCFHeaderLine line : header.getMetaDataInInputOrder() ) { + if ( line.getKey().equals(GATKVCFUtils.GATK_COMMAND_LINE_KEY) ) { + if ( line.toString().contains("HaplotypeCaller") ) { + Assert.assertFalse(foundHC); + foundHC = true; + } + if ( line.toString().contains("SelectVariants") ) { + Assert.assertFalse(foundSV); + foundSV = true; + } + } + } + + Assert.assertTrue(foundHC, "Didn't find HaplotypeCaller command line header field"); + Assert.assertTrue(foundSV, "Didn't find SelectVariants command line header field"); + } + // -------------------------------------------------------------------------------- // // Test that defaultBaseQualities actually works diff --git a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java new file mode 100644 index 000000000..051d0bcec --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java @@ -0,0 +1,86 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.variant; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.RodWalker; +import org.broadinstitute.sting.gatk.walkers.Walker; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Set; + +public class GATKVCFUtilsUnitTest extends BaseTest { + public static class VCFHeaderTestWalker extends RodWalker { + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { return null; } + public Integer reduceInit() { return 0; } + public Integer reduce(Integer value, Integer sum) { return value + sum; } + } + + public static class VCFHeaderTest2Walker extends VCFHeaderTestWalker {} + + @Test + public void testAddingVCFHeaderInfo() { + final VCFHeader header = new VCFHeader(); + + final Walker walker1 = new VCFHeaderTestWalker(); + final Walker walker2 = new VCFHeaderTest2Walker(); + + final GenomeAnalysisEngine testEngine1 = new GenomeAnalysisEngine(); + testEngine1.setWalker(walker1); + + final GenomeAnalysisEngine testEngine2 = new 
GenomeAnalysisEngine(); + testEngine2.setWalker(walker2); + + final VCFHeaderLine line1 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine1, Collections.EMPTY_LIST); + logger.warn(line1); + Assert.assertNotNull(line1); + Assert.assertEquals(line1.getKey(), GATKVCFUtils.GATK_COMMAND_LINE_KEY); + for ( final String field : Arrays.asList("Version", "ID", "Date", "CommandLineOptions")) + Assert.assertTrue(line1.toString().contains(field), "Couldn't find field " + field + " in " + line1.getValue()); + Assert.assertTrue(line1.toString().contains("ID=" + testEngine1.getWalkerName())); + + final VCFHeaderLine line2 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine2, Collections.EMPTY_LIST); + logger.warn(line2); + + header.addMetaDataLine(line1); + final Set lines1 = header.getMetaDataInInputOrder(); + Assert.assertTrue(lines1.contains(line1)); + + header.addMetaDataLine(line2); + final Set lines2 = header.getMetaDataInInputOrder(); + Assert.assertTrue(lines2.contains(line1)); + Assert.assertTrue(lines2.contains(line2)); + } +} \ No newline at end of file From 0018af0c0af3100d220315cc0b21b76b86f0e415 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Thu, 20 Jun 2013 13:08:29 -0400 Subject: [PATCH 83/99] Update README file for the 2.6 release --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 13b3c0c6e..2c245a214 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,3 @@ -gsa-unstable +The Genome Analysis Toolkit ============ See http://www.broadinstitute.org/gatk/ From dee51c4189a35d2237742d832f817f2c4d8635f6 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 21 Jun 2013 09:25:57 -0400 Subject: [PATCH 85/99] Error out when NCT and BAMOUT are used with the HaplotypeCaller -- Currently we don't support writing a BAM file from the haplotype caller when nct is enabled. 
Check in initialize if this is the case, and throw a UserException --- .../sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 9b9c3924b..db1ca552a 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -572,8 +572,12 @@ public class HaplotypeCaller extends ActiveRegionWalker, In genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, variantMerger ); - if ( bamWriter != null ) + if ( bamWriter != null ) { + // we currently do not support multi-threaded BAM writing, so exception out + if ( getToolkit().getTotalNumberOfThreads() > 1 ) + throw new UserException.BadArgumentValue("bamout", "Currently cannot emit a BAM file from the HaplotypeCaller in multi-threaded mode."); haplotypeBAMWriter = HaplotypeBAMWriter.create(bamWriterType, bamWriter, getToolkit().getSAMFileHeader()); + } trimmer = new ActiveRegionTrimmer(DEBUG, PADDING_AROUND_SNPS_FOR_CALLING, PADDING_AROUND_OTHERS_FOR_CALLING, UAC.GenotypingMode.equals(GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES) ? MAX_GGA_ACTIVE_REGION_EXTENSION : MAX_DISCOVERY_ACTIVE_REGION_EXTENSION, From f726d8130a076202cf2246a9fdb5123e0e91af30 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 20 Jun 2013 15:15:17 -0400 Subject: [PATCH 86/99] VariantRecalibrator bugfix for bad log10sumlog10 values -- The VR, when the model is bad, may evaluate log10sumlog10 where some of the values in the vector are NaN. This case is now trapped in VR and handled as previously -- indicating that the model has failed and evaluation continues. 
--- .../GaussianMixtureModel.java | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java index 92b0d4df2..efc24d5f9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java @@ -225,6 +225,20 @@ public class GaussianMixtureModel { isModelReadyForEvaluation = true; } + /** + * A version of Log10SumLog10 that tolerates NaN values in the array + * + * In the case where one or more of the values are NaN, this function returns NaN + * + * @param values a non-null vector of doubles + * @return log10 of the sum of the log10 values, or NaN + */ + private double nanTolerantLog10SumLog10(final double[] values) { + for ( final double value : values ) + if ( Double.isNaN(value) ) return Double.NaN; + return MathUtils.log10sumLog10(values); + } + public double evaluateDatum( final VariantDatum datum ) { for( final boolean isNull : datum.isNull ) { if( isNull ) { return evaluateDatumMarginalized( datum ); } @@ -235,7 +249,7 @@ public class GaussianMixtureModel { for( final MultivariateGaussian gaussian : gaussians ) { pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + gaussian.evaluateDatumLog10( datum ); } - return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) + return nanTolerantLog10SumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) } // Used only to decide which covariate dimension is most divergent in order to report in the culprit info field annotation @@ -247,7 +261,7 @@ public class GaussianMixtureModel { for( final MultivariateGaussian gaussian : gaussians ) { pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + 
MathUtils.normalDistributionLog10(gaussian.mu[iii], gaussian.sigma.get(iii, iii), datum.annotations[iii]); } - return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) + return nanTolerantLog10SumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) } public double evaluateDatumMarginalized( final VariantDatum datum ) { From 8caf39cb657ac341539eb860ddf5507a427164b5 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 20 Jun 2013 12:57:14 -0400 Subject: [PATCH 87/99] Experimental LikelihoodRankSum annotation -- Added experimental LikelihoodRankSum, which required slightly more detailed access to the information managed by the base class, so added an overloaded getElementForRead also provides access to the MostLikelyAllele class -- Added base class default implementation of getElementForPileupElement() which returns null, indicating that the pileup version isn't supported. -- Added @Override to many of the RankSum classes for safety's sake -- Updates to GeneralCallingPipeline: annotate sites with dbSNP IDs, -- R script to assess the value of annotations for VQSR --- .../annotator/BaseQualityRankSumTest.java | 4 + .../annotator/ClippingRankSumTest.java | 9 +-- .../annotator/LikelihoodRankSumTest.java | 79 +++++++++++++++++++ .../annotator/MappingQualityRankSumTest.java | 5 +- .../gatk/walkers/annotator/RankSumTest.java | 21 ++++- .../walkers/annotator/ReadPosRankSumTest.java | 8 +- 6 files changed, 114 insertions(+), 12 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java index 534834d0e..9ba468191 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java @@ -65,14 +65,18 @@ import java.util.*; *

The base quality rank sum test can not be calculated for sites without a mixture of reads showing both the reference and alternate alleles.

*/ public class BaseQualityRankSumTest extends RankSumTest implements StandardAnnotation { + @Override public List getKeyNames() { return Arrays.asList("BaseQRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("BaseQRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref base qualities")); } + @Override protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { return (double)read.getBaseQualities()[ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, refLoc, ReadUtils.ClippingTail.RIGHT_TAIL)]; } + @Override protected Double getElementForPileupElement(final PileupElement p) { return (double)p.getQual(); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java index 68e983bb8..eaa9df128 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java @@ -66,17 +66,14 @@ import java.util.*; * @since 6/28/12 */ public class ClippingRankSumTest extends RankSumTest { - + @Override public List getKeyNames() { return Arrays.asList("ClippingRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ClippingRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref number of hard clipped bases")); } + @Override protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { return (double)AlignmentUtils.getNumHardClippedBases(read); } - - protected Double getElementForPileupElement(final PileupElement p) { - // TODO - we only support the non-pileup case for now, e.g. 
an active-region based version - return null; - } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java new file mode 100644 index 000000000..c7fff5a7f --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java @@ -0,0 +1,79 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. 
+* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. 
+* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.variant.vcf.VCFHeaderLineType; +import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; + +import java.util.Arrays; +import java.util.List; + +/** + * U-based z-approximation from the Mann-Whitney Rank Sum Test contrasting the likelihoods of reads to their + * most likely haplotypes. 
This is effectively testing for a differentiate quality in the modeling of the alt + * allele than the reference allele. + */ +public class LikelihoodRankSumTest extends RankSumTest { + @Override + public List getKeyNames() { return Arrays.asList("LikelihoodRankSum"); } + + @Override + public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("LikelihoodRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref haplotype likelihoods")); } + + @Override + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc, final MostLikelyAllele mostLikelyAllele) { + if ( ! mostLikelyAllele.isInformative() ) throw new IllegalStateException("Should never have seen non-informative read " + read + " MostLikelyAllele " + mostLikelyAllele); + return mostLikelyAllele.getLog10LikelihoodOfMostLikely(); + } + + @Override + protected Double getElementForRead(GATKSAMRecord read, int refLoc) { + throw new IllegalStateException("This method should never have been called as getElementForRead(read,refloc,mostLikelyAllele) was overloaded"); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java index 0ebb09961..b2a504eb2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java @@ -64,15 +64,18 @@ import java.util.*; *

The mapping quality rank sum test can not be calculated for sites without a mixture of reads showing both the reference and alternate alleles.

*/ public class MappingQualityRankSumTest extends RankSumTest implements StandardAnnotation { - + @Override public List getKeyNames() { return Arrays.asList("MQRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("MQRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities")); } + @Override protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { return (double)read.getMappingQuality(); } + @Override protected Double getElementForPileupElement(final PileupElement p) { return (double)p.getRead().getMappingQuality(); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java index 37508fc06..1ba13afa1 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java @@ -176,7 +176,7 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR final GATKSAMRecord read = el.getKey(); if ( isUsableRead(read, refLoc) ) { - final Double value = getElementForRead(read, refLoc); + final Double value = getElementForRead(read, refLoc, a); if ( value == null ) continue; @@ -188,6 +188,18 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR } } + /** + * Get the element for the given read at the given reference position + * + * @param read the read + * @param refLoc the reference position + * @param mostLikelyAllele the most likely allele for this read + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc, final MostLikelyAllele mostLikelyAllele) { + return getElementForRead(read, refLoc); + } + /** * Get the element 
for the given read at the given reference position * @@ -202,10 +214,15 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR /** * Get the element for the given read at the given reference position * + * By default this function returns null, indicating that the test doesn't support the old style of pileup calculations + * * @param p the pileup element * @return a Double representing the element to be used in the rank sum test, or null if it should not be used */ - protected abstract Double getElementForPileupElement(final PileupElement p); + protected Double getElementForPileupElement(final PileupElement p) { + // does not work in pileup mode + return null; + } /** * Can the base in this pileup element be used in comparative tests between ref / alt bases? diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java index 37faaed22..f4528b16f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java @@ -71,14 +71,15 @@ import java.util.*; */ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotation { - public List getKeyNames() { - return Arrays.asList("ReadPosRankSum"); - } + @Override + public List getKeyNames() { return Arrays.asList("ReadPosRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ReadPosRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. 
Ref read position bias")); } + @Override protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) @@ -91,6 +92,7 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return (double)readPos; } + @Override protected Double getElementForPileupElement(final PileupElement p) { final int offset = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); return (double)getFinalReadPosition(p.getRead(), offset); From d976aae2b141567f8a91f53fe45b4925fff797c3 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Fri, 21 Jun 2013 16:59:22 -0400 Subject: [PATCH 88/99] Another fix for the Indel Realigner that arises because of secondary alignments. This time we don't accidentally drop reads (phew), but this bug does cause us not to update the alignment start of the mate. Fixed and added unit test to cover it. --- .../indels/ConstrainedMateFixingManager.java | 8 ++++-- .../ConstrainedMateFixingManagerUnitTest.java | 28 ++++++++++++++++++- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java index c98fe4d3c..4d50ef951 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java @@ -130,7 +130,7 @@ public class ConstrainedMateFixingManager { private static final boolean DEBUG = false; /** How often do we check whether we want to emit reads? 
*/ - private final static int EMIT_FREQUENCY = 1000; + protected final static int EMIT_FREQUENCY = 1000; /** * How much could a single read move in position from its original position? @@ -324,7 +324,8 @@ public class ConstrainedMateFixingManager { || noReadCanMoveBefore(read.getMateAlignmentStart(), newRead ) ) ) { // we're already past where the mate started // remove reads from the map that we have emitted -- useful for case where the mate never showed up - forMateMatching.remove(read.getReadName()); + if ( !read.getNotPrimaryAlignmentFlag() ) + forMateMatching.remove(read.getReadName()); if ( DEBUG ) logger.warn(String.format("EMIT! At %d: read %s at %d with isize %d, mate start %d, op = %s", @@ -346,7 +347,8 @@ public class ConstrainedMateFixingManager { private void writeRead(SAMRecord read) { try { - writer.addAlignment(read); + if ( writer != null ) + writer.addAlignment(read); } catch (IllegalArgumentException e) { throw new UserException("If the maximum allowable reads in memory is too small, it may cause reads to be written out of order when trying to write the BAM; please see the --maxReadsInMemory argument for details. 
" + e.getMessage(), e); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java index 9bcd7a3a3..0f910507e 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java @@ -66,9 +66,10 @@ public class ConstrainedMateFixingManagerUnitTest extends BaseTest { @BeforeClass public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 100); + header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 10000); genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); } + @Test public void testSecondaryAlignmentsDoNotInterfere() { final List properReads = ArtificialSAMUtils.createPair(header, "foo", 1, 10, 30, true, false); @@ -105,4 +106,29 @@ public class ConstrainedMateFixingManagerUnitTest extends BaseTest { } } + @Test + public void testSecondaryAlignmentsDoNotCauseAccidentalRemovalOfMate() { + final List properReads = ArtificialSAMUtils.createPair(header, "foo", 1, 530, 1594, true, false); + final GATKSAMRecord read1 = properReads.get(0); + read1.setFlags(99); // first in proper pair, mate negative strand + + final GATKSAMRecord read2Primary = properReads.get(1); + read2Primary.setFlags(147); // second in pair, mate unmapped, not primary alignment + read2Primary.setAlignmentStart(1596); // move the read + + final GATKSAMRecord read2NonPrimary = new GATKSAMRecord(read2Primary); + read2NonPrimary.setReadName("foo"); + read2NonPrimary.setFlags(393); // second in proper pair, on reverse strand + read2NonPrimary.setAlignmentStart(451); + read2NonPrimary.setMateAlignmentStart(451); + + final ConstrainedMateFixingManager manager = new ConstrainedMateFixingManager(null, genomeLocParser, 10000, 
200, 10000); + manager.addRead(read2NonPrimary, false, false); + manager.addRead(read1, false, false); + + for ( int i = 0; i < ConstrainedMateFixingManager.EMIT_FREQUENCY; i++ ) + manager.addRead(ArtificialSAMUtils.createArtificialRead(header, "foo" + i, 0, 1500, 10), false, false); + + Assert.assertTrue(manager.forMateMatching.containsKey("foo")); + } } \ No newline at end of file From 3e5ff6095f88e51c089a86ab326731885d8748ab Mon Sep 17 00:00:00 2001 From: Valentin Ruano-Rubio Date: Fri, 21 Jun 2013 17:02:26 -0400 Subject: [PATCH 89/99] Added the pertinent DocumentedGATKFeature annotation ot AnalyzeCovariates --- .../sting/gatk/walkers/bqsr/AnalyzeCovariates.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java index 7a7527dd1..c8c5eae0b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java @@ -49,12 +49,15 @@ import com.google.java.contract.Requires; import org.broadinstitute.sting.commandline.Argument; import org.broadinstitute.sting.commandline.Input; import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.RodWalker; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; +import org.broadinstitute.sting.utils.help.HelpConstants; import org.broadinstitute.sting.utils.recalibration.RecalUtils; import 
org.broadinstitute.sting.utils.recalibration.RecalibrationReport; import org.broadinstitute.sting.utils.recalibration.BaseRecalibration; @@ -199,6 +202,7 @@ import java.util.Map; * @version 6/16/2013 * @since 2.6 */ +@DocumentedGATKFeature(groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class}) public final class AnalyzeCovariates extends RodWalker { From 165b936fcd6160bfd82da79c36ef2e32765006fd Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Mon, 24 Jun 2013 14:06:21 -0400 Subject: [PATCH 91/99] Fixing the 'header is negative' problem in Reduce Reads... again. Previous fixes and tests only covered trailing soft-clips. Now that up front hard-clipping is working properly though, we were failing on those in the tool. Added a patch for this as well as a separate test independent of the soft-clips to make sure that it's working properly. --- .../reducereads/SlidingWindowUnitTest.java | 17 +++++++++++++++-- .../sting/utils/sam/ReadUtils.java | 6 +++--- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java index c9bb2f084..bf45fc298 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java @@ -94,7 +94,7 @@ public class SlidingWindowUnitTest extends BaseTest { ////////////////////////////////////////////////////////////////////////////////////// @Test(enabled = true) - public void testLeadingClipThenInsertion() { + public void testLeadingSoftClipThenInsertion() { final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 10); read.setReadBases(Utils.dupBytes((byte) 'A', 10)); @@ -104,8 +104,21 @@ public class 
SlidingWindowUnitTest extends BaseTest { final SlidingWindow slidingWindow = new SlidingWindow("1", 0, 1); slidingWindow.addRead(read); - Pair, CompressionStash> result = slidingWindow.close(null); + slidingWindow.close(null); + } + @Test(enabled = true) + public void testLeadingHardClipThenInsertion() { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 8); + read.setReadBases(Utils.dupBytes((byte) 'A', 8)); + read.setBaseQualities(Utils.dupBytes((byte)30, 8)); + read.setMappingQuality(30); + read.setCigarString("2H2I6M"); + + final SlidingWindow slidingWindow = new SlidingWindow("1", 0, 10, header, new GATKSAMReadGroupRecord("test"), 0, 0.05, 0.05, 0.05, 20, 20, 100, ReduceReads.DownsampleStrategy.Normal, false); + slidingWindow.addRead(read); + slidingWindow.close(null); } ////////////////////////////////////////////////////////////////////////////////////// diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java index cf1c9cb8e..f9393cc4b 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java @@ -613,15 +613,15 @@ public class ReadUtils { * Checks if a read starts with an insertion. * * @param cigarForRead the CIGAR to evaluate - * @param ignoreClipOps should we ignore S and H operators when evaluating whether an I operator is at the beginning? + * @param ignoreSoftClipOps should we ignore S operators when evaluating whether an I operator is at the beginning? Note that H operators are always ignored. 
* @return the element if it's a leading insertion or null otherwise */ - public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreClipOps) { + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreSoftClipOps) { for ( final CigarElement cigarElement : cigarForRead.getCigarElements() ) { if ( cigarElement.getOperator() == CigarOperator.INSERTION ) return cigarElement; - else if ( !ignoreClipOps || (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP) ) + else if ( cigarElement.getOperator() != CigarOperator.HARD_CLIP && ( !ignoreSoftClipOps || cigarElement.getOperator() != CigarOperator.SOFT_CLIP) ) break; } return null; From 94294ed6c44c00ba53e7232763c01f9658f91361 Mon Sep 17 00:00:00 2001 From: David Roazen Date: Tue, 25 Jun 2013 15:48:44 -0400 Subject: [PATCH 93/99] Move DownsampleReadsQC walker to private --- .../walkers/readutils/DownsampleReadsQC.java | 105 ------------------ 1 file changed, 105 deletions(-) delete mode 100644 public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java deleted file mode 100644 index 1141a9164..000000000 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2012 The Broad Institute - * - * Permission is hereby granted, free of charge, to any person - * obtaining a copy of this software and associated documentation - * files (the "Software"), to deal in the Software without - * restriction, including without limitation the rights to use, - * copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the - * Software is 
furnished to do so, subject to the following - * conditions: - * - * The above copyright notice and this permission notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR - * THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -package org.broadinstitute.sting.gatk.walkers.readutils; - -import org.broadinstitute.sting.commandline.Argument; -import org.broadinstitute.sting.commandline.Output; -import org.broadinstitute.sting.gatk.contexts.ReferenceContext; -import org.broadinstitute.sting.gatk.downsampling.DownsamplingUtils; -import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; -import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; -import org.broadinstitute.sting.gatk.walkers.DataSource; -import org.broadinstitute.sting.gatk.walkers.NanoSchedulable; -import org.broadinstitute.sting.gatk.walkers.ReadWalker; -import org.broadinstitute.sting.gatk.walkers.Requires; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedList; - -/** - */ -@Requires({DataSource.READS, DataSource.REFERENCE}) -public class DownsampleReadsQC extends ReadWalker> implements NanoSchedulable { - @Output(doc="Write output to this BAM filename instead of STDOUT", required = true) - StingSAMFileWriter out; - - @Argument(fullName = "minReadsPerAlignmentStart", shortName = "minReadsPerAlignmentStart", doc ="", required = false) - private int minReadsPerAlignmentStart = 5; - - @Argument(fullName = "downsampleTo", shortName = 
"downsampleTo", doc ="", required = false) - private int downsampleTo = 1000; - - /** - * The initialize function. - */ - public void initialize() { -// final boolean preSorted = true; -// if (getToolkit() != null && getToolkit().getArguments().BQSR_RECAL_FILE != null && !NO_PG_TAG ) { -// Utils.setupWriter(out, getToolkit(), getToolkit().getSAMFileHeader(), !preSorted, keep_records, this, PROGRAM_RECORD_NAME); -// } - } - - /** - * The reads map function. - * - * @param ref the reference bases that correspond to our read, if a reference was provided - * @param readIn the read itself, as a GATKSAMRecord - * @return the read itself - */ - public GATKSAMRecord map( ReferenceContext ref, GATKSAMRecord readIn, RefMetaDataTracker metaDataTracker ) { - return readIn; - } - - /** - * reduceInit is called once before any calls to the map function. We use it here to setup the output - * bam file, if it was specified on the command line - * - * @return SAMFileWriter, set to the BAM output file if the command line option was set, null otherwise - */ - public Collection reduceInit() { - return new LinkedList(); - } - - /** - * given a read and a output location, reduce by emitting the read - * - * @param read the read itself - * @param output the output source - * @return the SAMFileWriter, so that the next reduce can emit to the same source - */ - public Collection reduce( GATKSAMRecord read, Collection output ) { - output.add(read); - return output; - } - - @Override - public void onTraversalDone(Collection result) { - for ( final GATKSAMRecord read : DownsamplingUtils.levelCoverageByPosition(new ArrayList(result), downsampleTo, minReadsPerAlignmentStart) ) - out.addAlignment(read); - } -} From f242be12c0f7ab40a9b7fe3270888c5ba355ce09 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Wed, 26 Jun 2013 11:45:21 -0400 Subject: [PATCH 94/99] Make this walker @Hidden --- .../sting/gatk/walkers/qc/AssessReducedQuals.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff 
--git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java index a3bdc6691..13daee8c9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java @@ -47,6 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.qc; import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Hidden; import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; @@ -89,7 +90,7 @@ import java.util.List; * * @author ami */ - +@Hidden public class AssessReducedQuals extends LocusWalker implements TreeReducible { private static final String reduced = "reduced"; From 876e40466a43660d9fbf280bb03a3436b29e20f0 Mon Sep 17 00:00:00 2001 From: Eric Banks Date: Wed, 26 Jun 2013 14:48:09 -0400 Subject: [PATCH 95/99] Proper fix for previous RR -cancer_mode fix. I "fixed" this once before but instead of testing with unit tests I used integration tests. Bad decision. The proper fix is in now, with a bonafide unit test included. 
--- .../reducereads/SlidingWindow.java | 2 +- .../reducereads/SlidingWindowUnitTest.java | 27 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java index 0425af3df..5115a6777 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java @@ -878,7 +878,7 @@ public class SlidingWindow { int stop = region.getStop() - windowHeaderStart; // make sure the bitset is complete given the region (it might not be in multi-sample mode) - if ( region.getStop() > markedSites.getStartLocation() + markedSites.getVariantSiteBitSet().length ) + if ( region.getStop() > markedSites.getStartLocation() + markedSites.getVariantSiteBitSet().length - 1 ) markSites(region.getStop()); CloseVariantRegionResult closeVariantRegionResult = closeVariantRegion(start, stop, knownSnpPositions); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java index bf45fc298..bd0a8933c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java @@ -253,6 +253,33 @@ public class SlidingWindowUnitTest extends BaseTest { return count; } + @Test(enabled = true) + public void testMarkingRegionInCancerMode() { + + final int contextSize = 10; + final SlidingWindow slidingWindow = new SlidingWindow("1", 0, contextSize, header, new GATKSAMReadGroupRecord("test"), 0, 0.05, 0.05, 0.05, 20, 20, 
100, ReduceReads.DownsampleStrategy.Normal, false); + slidingWindow.addRead(createSimpleRead("1", 0, 34, 75)); + slidingWindow.addRead(createSimpleRead("2", 0, 97, 73)); + slidingWindow.addRead(createSimpleRead("3", 0, 98, 75)); + slidingWindow.addRead(createSimpleRead("4", 0, 98, 75)); + slidingWindow.addRead(createSimpleRead("5", 0, 98, 75)); + + final CompressionStash regions = new CompressionStash(); + regions.add(new FinishedGenomeLoc("1", 0, 89, 109, true)); + + slidingWindow.closeVariantRegions(regions, null, false); + Assert.assertEquals(slidingWindow.getMarkedSitesForTesting().getVariantSiteBitSet().length, 76 + contextSize); + } + + private GATKSAMRecord createSimpleRead(final String name, final int refIndex, final int alignmentStart, final int length) { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, name, refIndex, alignmentStart, length); + read.setReadBases(Utils.dupBytes((byte) 'A', length)); + read.setBaseQualities(Utils.dupBytes((byte) 30, length)); + read.setMappingQuality(60); + return read; + } + ///////////////////////////////////////////////////////////////// //// This section tests the consensus creation functionality ////