package var2;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;

import shared.Shared;
import shared.Tools;
import structures.ByteBuilder;

/**
 * Thread-safe container mapping Vars to themselves, striped across WAYS
 * ConcurrentHashMaps (stripe chosen by {@code var.start&MASK}) to reduce
 * lock contention under heavy multithreaded insertion.  Also carries
 * dataset-wide statistics (ploidy, pairing rate, quality/mapq/read-length
 * averages) consumed when filtering and scoring variants.
 */
public class VarMap implements Iterable<Var> {
	
	/*--------------------------------------------------------------*/
	/*----------------        Construction          ----------------*/
	/*--------------------------------------------------------------*/
	
	/** Construct with all dataset statistics unknown (-1). */
	VarMap(ScafMap scafMap_){
		this(scafMap_, -1, -1, -1, -1, -1);
	}
	
	/**
	 * @param scafMap_ Scaffold map used for coverage calculation and scoring.
	 * @param ploidy_ Organism ploidy, or -1 if unknown.
	 * @param pairingRate_ Proper-pair rate, or -1 if unknown.
	 * @param totalQualityAvg_ Average base quality, or -1 if unknown.
	 * @param mapqAvg_ Average mapping quality, or -1 if unknown.
	 * @param readLengthAvg_ Average read length, or -1 if unknown.
	 */
	@SuppressWarnings("unchecked")
	VarMap(ScafMap scafMap_, int ploidy_, double pairingRate_, double totalQualityAvg_,
			double mapqAvg_, double readLengthAvg_){
		scafMap=scafMap_;
		ploidy=ploidy_;
		properPairRate=pairingRate_;
		totalQualityAvg=totalQualityAvg_;
		totalMapqAvg=mapqAvg_;
		readLengthAvg=readLengthAvg_;
		//Generic array creation is impossible in Java; this is the standard workaround.
		maps=new ConcurrentHashMap[WAYS];
		for(int i=0; i<WAYS; i++){
			maps[i]=new ConcurrentHashMap<Var, Var>();
		}
	}
	
	/*--------------------------------------------------------------*/
	/*----------------            Methods           ----------------*/
	/*--------------------------------------------------------------*/
	
	/**
	 * Count nearby variants for every Var in this map, using the
	 * thresholds stored in the filter.
	 * @return Number of Vars whose nearby count exceeded the maximum.
	 */
	public int countNearbyVars(VarFilter varFilter) {
		return countNearbyVars(varFilter, varFilter.maxNearbyCount, varFilter.nearbyDist,
				varFilter.nearbyGap, varFilter.flagNearby);
	}
	
	/**
	 * Count nearby variants for every Var in this map.
	 * @param maxCount0 Requested maximum nearby count; clamped to [8,19], or 19 if negative.
	 * @param maxDist Maximum distance from the focal Var to a neighbor.
	 * @param maxGap Maximum gap between successive neighbors.
	 * @param flag If true, set the flagged bit on Vars exceeding the filter's max.
	 * @return Number of Vars whose nearby count exceeded maxCount.
	 */
	public int countNearbyVars(VarFilter varFilter, final int maxCount0, final int maxDist, final int maxGap, final boolean flag) {
		//Clamp the scan budget; counting stops once maxCount is exceeded.
		final int maxCount=maxCount0<0 ? 19 : Tools.mid(maxCount0, 8, 19);
		final Var[] array=toArray(true);//Position-sorted, so neighbors are adjacent in the array.
		int failed=0;
		for(int vloc=0; vloc<array.length; vloc++){
			int x=countNearbyVars(varFilter, array, vloc, maxCount, maxDist, maxGap, flag);
			if(x>maxCount){failed++;}
		}
		return failed;
	}
	
	/**
	 * Evaluate a single Var against the filter, first with the fast test,
	 * then (if that passes) with the full filter after computing coverage.
	 * Forced Vars always pass the full filter.
	 */
	private boolean passesSolo(Var v, VarFilter varFilter){
		assert(varFilter!=null);
		if(varFilter==null){return true;}
		boolean pass=varFilter.passesFast(v);
		if(pass){
			v.calcCoverage(scafMap);
			pass=v.forced() || varFilter.passesFilter(v, properPairRate, totalQualityAvg,
					totalMapqAvg, readLengthAvg, ploidy, scafMap, false);
		}
		return pass;
	}
	
	/**
	 * Count variants near array[vloc0] by scanning left and right until the
	 * gap/distance limits are exceeded or maxCount is passed.  Stores the
	 * result in the Var's nearbyVarCount field and optionally flags it.
	 * @return The nearby count (may be maxCount+1 if the scan was truncated).
	 */
	public int countNearbyVars(VarFilter varFilter, final Var[] array, final int vloc0, final int maxCount,
			final int maxDist, final int maxGap, final boolean flag) {
		final Var v0=array[vloc0];
		assert(v0.nearbyVarCount==-1) : "Nearby vars were already counted?";
		int nearby=0;
		
		{//Scan left
			Var prev=v0;
			for(int i=vloc0-1; i>=0 && nearby<=maxCount; i--){
				final Var v=array[i];
				// v.stop==v.start means adjacent;
				if(prev.start-v.stop>maxGap || v0.start-v.stop>maxDist){break;}
				
				if(!v.forced() || passesSolo(v, varFilter)){
					nearby++;
					prev=v;
				}
			}
		}
		{//Scan right
			Var prev=v0;
			for(int i=vloc0+1; i<array.length && nearby<=maxCount; i++){
				final Var v=array[i];
				// v.stop==v.start means adjacent;
				if(v.start-prev.stop>maxGap || v.start-v0.stop>maxDist){break;}
				
				if(!v.forced() || passesSolo(v, varFilter)){
					nearby++;
					prev=v;
				}
			}
		}
		v0.nearbyVarCount=nearby;
		if(flag && nearby>varFilter.maxNearbyCount){
			v0.setFlagged(true);
		}
		return nearby;
	}
	
	/*--------------------------------------------------------------*/
	/*----------------            Getters           ----------------*/
	/*--------------------------------------------------------------*/
	
	/** True if an equivalent Var is present in this map. */
	public boolean containsKey(Var v) {
		return get(v)!=null;
	}
	
	/** Fetch the stored Var equal to v, or null if absent. */
	Var get(final Var v){
		final int way=v.start&MASK;
		return maps[way].get(v);
	}
	
	/** Total number of Vars across all stripes. */
	public long size(){
		long size=0;
		for(int i=0; i<maps.length; i++){size+=maps[i].size();}
		return size;
	}
	
	/** Debug-only O(n) size computed by iteration; should equal size(). */
	public long size2(){//123 Slow
		assert(false) : "Slow";
		int i=0;
		for(Var v : this){i++;}
		return i;
	}
	
	/*--------------------------------------------------------------*/
	/*----------------            Adders            ----------------*/
	/*--------------------------------------------------------------*/
	
	/**
	 * Thread-safe add: inserts v, or merges its counts into the existing
	 * equivalent Var.  Locks the stripe for insertion and the existing Var
	 * for merging.
	 * @return 1 if a new Var was inserted, 0 if merged into an existing one.
	 */
	private int add(final Var v){
		final ConcurrentHashMap<Var, Var> map=maps[v.start&MASK];
		synchronized(map){
			Var old=map.get(v);
			if(old==null){
				map.put(v, v);
				return 1;
			}
			else{
				synchronized(old){
					old.add(v);
				}
			}
		}
		return 0;
	}
	
	/**
	 * Single-threaded add: inserts v or merges into the existing Var,
	 * without locking.  Caller must guarantee exclusive access.
	 * @return 1 if a new Var was inserted, 0 if merged.
	 */
	int addUnsynchronized(final Var v){
		final ConcurrentHashMap<Var, Var> map=maps[v.start&MASK];
		Var old=map.get(v);
		if(old==null){
			map.put(v, v);
			return 1;
		}
		old.add(v);
		return 0;
	}
	
	/**
	 * Remove v without locking.  Caller must guarantee exclusive access.
	 * @return 1 if something was removed, 0 otherwise.
	 */
	int removeUnsynchronized(Var v){
		final ConcurrentHashMap<Var, Var> map=maps[v.start&MASK];
		return map.remove(v)==null ? 0 : 1;
	}
	
	/**
	 * Merge a per-thread local map into this shared map, then clear it.
	 * Pass 1 merges into existing Vars (locking each) and collects absent
	 * Vars per stripe; pass 2 locks each stripe and inserts the absentees,
	 * re-checking for races.
	 * @return Number of novel Vars inserted.
	 */
	int dumpVars(HashMap<Var, Var> mapT){
		int added=0;
		@SuppressWarnings("unchecked")
		ArrayList<Var>[] absent=new ArrayList[WAYS];
		for(int i=0; i<WAYS; i++){
			absent[i]=new ArrayList<Var>();
		}
		for(Entry<Var, Var> e : mapT.entrySet()){
			Var v=e.getValue();
			final int way=v.start&MASK;
			ConcurrentHashMap<Var, Var> map=maps[way];
			Var old=map.get(v);
			if(old==null){absent[way].add(v);}
			else{
				synchronized(old){
					old.add(v);
				}
			}
		}
		
		mapT.clear();
		for(int way=0; way<WAYS; way++){
			ConcurrentHashMap<Var, Var> map=maps[way];
			ArrayList<Var> list=absent[way];
			synchronized(map){
				for(Var v : list){
					Var old=get(v);//Re-check; another thread may have inserted it since pass 1.
					if(old==null){
						map.put(v, v);
						added++;
					}
					else{
						synchronized(old){
							old.add(v);
						}
					}
				}
			}
		}
		return added;
	}
	
	/*--------------------------------------------------------------*/
	/*----------------             Other            ----------------*/
	/*--------------------------------------------------------------*/
	
	/**
	 * Single-threaded variant processing: a prefilter pass, an
	 * insertion-revision pass, then a final pass that filters, scores,
	 * and accumulates histograms.  Only the final pass's type counts
	 * are returned; earlier passes' results are intentionally discarded.
	 * @return Per-type counts of passing variants.
	 */
	public long[] processVariantsST(VarFilter filter, long[][] scoreArray, long[] ploidyArray, long[][] avgQualityArray,
			long[] maxQualityArray, long[][] ADArray, double[] AFArray) {
		assert(properPairRate>=0);
		assert(ploidy>0);
		assert(totalQualityAvg>=0);
		
		long[] types=new long[Var.VAR_TYPES];
		for(ConcurrentHashMap<Var, Var> map : maps){
			long[] types2=processVariants(map, filter, null, null, null, null, null, null, false, false);
			types2=processVariants(map, filter, null, null, null, null, null, null, true, false);
			types2=processVariants(map, filter, scoreArray, ploidyArray, avgQualityArray, maxQualityArray, ADArray, AFArray, false, false);
			Tools.add(types, types2);
		}
		return types;
	}
	
	/**
	 * Multithreaded variant processing; same three passes as
	 * {@link #processVariantsST}, each fanned out across WAYS threads.
	 * @return Per-type counts of passing variants from the final pass.
	 */
	public long[] processVariantsMT(VarFilter filter, long[][] scoreArray, long[] ploidyArray,
			long[][] avgQualityArray, long[] maxQualityArray, long[][] ADArray, double[] AFArray) {
		processVariantsMT_inner(filter, null, null, null, null, null, null, false);
		processVariantsMT_inner(filter, null, null, null, null, null, null, true);
		return processVariantsMT_inner(filter, scoreArray, ploidyArray, avgQualityArray, maxQualityArray, ADArray, AFArray, false);
	}
	
	/** Spawn one ProcessThread per stripe, join them, and accumulate statistics. */
	private long[] processVariantsMT_inner(VarFilter filter, long[][] scoreArray, long[] ploidyArray,
			long[][] avgQualityArray, long[] maxQualityArray, long[][] ADArray, double[] AFArray, boolean processInsertions) {
		assert(properPairRate>=0);
		assert(ploidy>0);
		assert(totalQualityAvg>=0);
		
		ArrayList<ProcessThread> alpt=new ArrayList<ProcessThread>(WAYS);
		for(int i=0; i<WAYS; i++){
			ProcessThread pt=new ProcessThread(maps[i], filter, scoreArray!=null, ploidyArray!=null, avgQualityArray!=null, ADArray!=null, processInsertions);
			alpt.add(pt);
			pt.start();
		}
		
		long[] types=new long[Var.VAR_TYPES];
		boolean success=true;
		for(ProcessThread pt : alpt){
			//Wait until this thread has terminated
			while(pt.getState()!=Thread.State.TERMINATED){
				try {
					//Attempt a join operation
					pt.join();
				} catch (InterruptedException e) {
					//Potentially handle this, if it is expected to occur
					e.printStackTrace();
				}
			}
			
			//Accumulate per-thread statistics
			if(pt.types!=null){
				Tools.add(types, pt.types);
			}
			if(scoreArray!=null){Tools.add(scoreArray, pt.scoreArray);}
			if(ploidyArray!=null){Tools.add(ploidyArray, pt.ploidyArray);}
			if(avgQualityArray!=null){Tools.add(avgQualityArray, pt.avgQualityArray);}
			if(maxQualityArray!=null){Tools.add(maxQualityArray, pt.maxQualityArray);}
			if(ADArray!=null){Tools.add(ADArray, pt.ADArray);}
			if(AFArray!=null){Tools.add(AFArray, pt.AFArray);}//Was previously (incorrectly) gated on ADArray!=null.
			success&=pt.success;
		}
		
		return types;
	}
	
	/** Worker that runs processVariants over one stripe, with private histograms. */
	private class ProcessThread extends Thread {
		
		ProcessThread(Map<Var, Var> map_, VarFilter filter_, boolean trackScores, boolean trackPloidy,
				boolean trackQuality, boolean trackAD, boolean processInsertions_){
			map=map_;
			filter=filter_;
			scoreArray=(trackScores ? new long[8][200] : null);
			ploidyArray=(trackPloidy ? new long[ploidy+1] : null);
			avgQualityArray=(trackQuality ? new long[8][100] : null);
			maxQualityArray=(trackQuality ? new long[100] : null);
			ADArray=(trackAD ? new long[2][7] : null);
			AFArray=(trackAD ? new double[7] : null);
			processInsertions=processInsertions_;
		}
		
		@Override
		public void run(){
			types=processVariants(map, filter, scoreArray, ploidyArray, avgQualityArray, maxQualityArray, ADArray, AFArray, processInsertions, false);
			success=true;
		}
		
		final VarFilter filter;
		final Map<Var, Var> map;
		long[] types;
		final long[][] scoreArray;
		final long[] ploidyArray;
		final long[][] avgQualityArray;
		final long[] maxQualityArray;
		final long[][] ADArray;
		final double[] AFArray;
		boolean processInsertions;
		boolean success=false;
	}
	
	/**
	 * Core per-stripe pass.  In insertion mode, revises allele fractions of
	 * insertions.  Otherwise, filters each Var (removing failures via the
	 * iterator) and accumulates type counts and optional histograms.
	 * @return Per-type counts of passing variants.
	 */
	private long[] processVariants(Map<Var, Var> map, VarFilter filter, long[][] scoreArray, long[] ploidyArray,
			long[][] avgQualityArray, long[] maxQualityArray, long[][] ADArray, double[] AFArray, boolean processInsertions, boolean considerNearby) {
		assert(properPairRate>=0);
		assert(ploidy>0);
		assert(totalQualityAvg>=0);
		Iterator<Entry<Var, Var>> iterator=map.entrySet().iterator();
		long[] types=new long[Var.VAR_TYPES];
		while(iterator.hasNext()){
			Entry<Var, Var> entry=iterator.next();
			final Var v=entry.getValue();
			
			if(processInsertions){
				assert(readLengthAvg>0);
				if(v.type()==Var.INS){
					synchronized(v){
						v.reviseAlleleFraction(readLengthAvg, scafMap.getScaffold(v.scafnum), this);
					}
				}
			}else{
				boolean pass=filter.passesFast(v);
				if(pass){
					v.calcCoverage(scafMap);
					pass=v.forced() || filter.passesFilter(v, properPairRate, totalQualityAvg, totalMapqAvg, readLengthAvg, ploidy, scafMap, considerNearby);
				}
				if(pass){
					types[v.type()]++;
					if(scoreArray!=null){
						int score=(int)v.phredScore(properPairRate, totalQualityAvg, totalMapqAvg, readLengthAvg, filter.rarity, ploidy, scafMap);
						scoreArray[0][score]++;//Row 0 aggregates all types; row type+1 is per-type.
						scoreArray[v.type()+1][score]++;
					}
					if(ploidyArray!=null){ploidyArray[v.calcCopies(ploidy)]++;}
					if(avgQualityArray!=null){
						int q=(int)v.baseQAvg();
						avgQualityArray[0][q]++;
						avgQualityArray[v.type()+1][q]++;
					}
					if(maxQualityArray!=null){maxQualityArray[(int)v.baseQMax]++;}
					if(ADArray!=null){
						ADArray[0][v.type()]+=v.alleleCount();
						ADArray[1][v.type()]+=v.coverage();
					}
					if(AFArray!=null){AFArray[v.type()]+=v.alleleFraction();}
				}else{
					iterator.remove();//Safe removal during iteration.
				}
			}
		}
		return types;
	}
	
	/**
	 * Ensure every Var in sharedMap is present in map (inserting copies of
	 * absentees), then compute coverage and count types over the shared set.
	 * @return Per-type counts over sharedMap's keys.
	 */
	private long[] addSharedVariants(Map<Var, Var> map, Map<Var, Var> sharedMap) {
		assert(properPairRate>=0);
		assert(ploidy>0);
		assert(totalQualityAvg>=0);
		
		for(Var v : sharedMap.keySet()){
			if(!map.containsKey(v)){
				Var v2=new Var(v);
				map.put(v2, v2);
			}
		}
		
		long[] types=new long[Var.VAR_TYPES];
		for(Var v : sharedMap.keySet()){
			v.calcCoverage(scafMap);
			types[v.type()]++;
		}
		return types;
	}
	
	/**
	 * Snapshot all Vars into an array.
	 * @param sort If true, sort the array (by position).
	 */
	public Var[] toArray(boolean sort) {
		Var[] array=new Var[(int)size()];
		int i=0;
		for(Var v : this){
			assert(i<array.length);
			array[i]=v;
			i++;
		}
		if(sort){Shared.sort(array);}
		return array;
	}
	
	/** Debug-only invariant check: every key maps to itself, exactly once, in exactly one stripe. */
	private boolean mappedToSelf(boolean quiet){//123 slow
		assert(false) : "Slow";
		for(ConcurrentHashMap<Var, Var> map : maps){
			for(Var key : map.keySet()){
				Var value=map.get(key);
				assert(value!=null);
				assert(value.equals(key));
				assert(value==key);
				assert(map.get(value).equals(key));
			}
			for(Entry<Var, Var> e : map.entrySet()){
				Var key=e.getKey();
				Var value=e.getValue();
				assert(value!=null);
				assert(value.equals(key));
				assert(value==key);
			}
			for(ConcurrentHashMap<Var, Var> map2 : maps){
				if(map2!=map){
					for(Var key : map.keySet()){
						assert(!map2.containsKey(key));
					}
				}
			}
		}
		int i=0;
		for(Var v : this){
			if(!quiet){System.err.println(i+"\t"+v.start+"\t"+v.stop+"\t"+v.toKey()+"\t"+v.hashcode+"\t"+v.hashCode()+"\t"+new String(v.allele)+"\t"+((Object)v).hashCode());}
			Var v2=get(v);
			assert(v==v2);
			assert(get(v2)==v);
			assert(get(v)==v) : "\n"+i+"\t"+v2.start+"\t"+v2.stop+"\t"+v2.toKey()+"\t"+v2.hashcode+"\t"+v2.hashCode()+"\t"+new String(v2.allele)+"\t"+((Object)v2).hashCode();
			i++;
		}
		assert(i==size()) : i+", "+size()+", "+size2();
		return true;
	}
	
	/** Compute coverage for every Var and return per-type counts. */
	public long[] calcCoverage(ScafMap scafMap) {
		long[] types=new long[Var.VAR_TYPES];
		for(Var v : this){
			v.calcCoverage(scafMap);
			types[v.type()]++;
		}
		return types;
	}
	
	/** Tally per-type counts without any side effects. */
	public long[] countTypes() {
		long[] types=new long[Var.VAR_TYPES];
		for(Var v : this){
			types[v.type()]++;
		}
		return types;
	}
	
	/**
	 * Discard all Vars and reset dataset statistics to unknown.
	 * Note: ploidy is intentionally NOT reset here.
	 */
	public void clear() {
		properPairRate=-1;
		pairedInSequencingRate=-1;
		totalQualityAvg=-1;
		totalMapqAvg=-1;
		readLengthAvg=-1;
		for(int i=0; i<maps.length; i++){
			maps[i]=new ConcurrentHashMap<Var, Var>();
		}
	}
	
	@Override
	public String toString(){
		ByteBuilder sb=new ByteBuilder();
		for(ConcurrentHashMap<Var, Var> map : maps){
			for(Var v : map.keySet()){
				v.toTextQuick(sb);
				sb.nl();
			}
		}
		return sb.toString();
	}
	
	/*--------------------------------------------------------------*/
	/*----------------           Iteration          ----------------*/
	/*--------------------------------------------------------------*/
	
	@Override
	public VarMapIterator iterator(){
		return new VarMapIterator();
	}
	
	/** Iterates all stripes in order, transparently skipping empty ones. */
	private class VarMapIterator implements Iterator<Var> {
		
		VarMapIterator(){
			makeReady();
		}
		
		@Override
		public boolean hasNext() {
			return iter.hasNext();
		}
		
		@Override
		public Var next() {
			Entry<Var, Var> e=iter.next();
			if(!iter.hasNext()){makeReady();}//Advance eagerly so hasNext() stays accurate.
			Var v=e==null ? null : e.getValue();
			return v;
		}
		
		/** Advance to the next stripe whose iterator has elements, if any. */
		private void makeReady(){
			while((iter==null || !iter.hasNext()) && nextMap<maps.length){
				iter=maps[nextMap].entrySet().iterator();
				nextMap++;
			}
		}
		
		private int nextMap=0;
		private Iterator<Entry<Var, Var>> iter=null;
		
	}
	
	/*--------------------------------------------------------------*/
	/*----------------            Fields            ----------------*/
	/*--------------------------------------------------------------*/
	
	public int ploidy=-1;
	public double properPairRate=-1;
	public double pairedInSequencingRate=-1;
	public double totalQualityAvg=-1;
	public double totalMapqAvg=-1;
	public double readLengthAvg=-1;
	public final ScafMap scafMap;
	/** ConcurrentHashMap appears to be faster than HashMap here, if there are lots of threads. */
	final ConcurrentHashMap<Var, Var>[] maps;
	
	/*--------------------------------------------------------------*/
	/*----------------         Static fields        ----------------*/
	/*--------------------------------------------------------------*/
	
	/** Vestigial; this class does not implement Serializable. */
	private static final long serialVersionUID = 1L;
	/** Must be a power of 2.  Max Vars stored is ways times 2 billion */
	private static final int WAYS=8;
	public static final int MASK=WAYS-1;
	
}