package org.broadinstitute.hellbender.utils.pairhmm;

import org.broadinstitute.gatk.nativebindings.pairhmm.PairHMMNativeArguments;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.genotyper.LikelihoodMatrix;
import org.broadinstitute.hellbender.utils.haplotype.Haplotype;
import org.broadinstitute.hellbender.utils.read.ArtificialReadUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.read.ReadUtils;
import org.testng.Assert;
import org.testng.SkipException;
import org.testng.annotations.Test;
import picard.util.BasicInputParser;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.List;

public final class VectorPairHMMUnitTest extends GATKBaseTest {

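    // Test data format (tab-separated, one case per line): haplotype bases, read bases,
    // base qualities, insertion qualities, deletion qualities, and gap continuation
    // penalties (all qualities Phred+33 encoded), followed by the expected log10 likelihood.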
    private static final String pairHMMTestData = publicTestDir + "pairhmm-testdata.txt";

    // This method originally used a DataProvider to individually test each available implementation,
    // but was refactored to avoid the resulting intermittent failures
    // (possibly caused by concurrency issues when loading libraries;
    // see https://github.com/broadinstitute/gatk/pull/5026#issuecomment-607596205).
    @Test
    public void testLikelihoodsFromHaplotypesForAvailableImplementations() {
        final PairHMMNativeArguments args = new PairHMMNativeArguments();
        args.useDoublePrecision = false;
        args.maxNumberOfThreads = 1;

        // Skip this test on Java 11. Re-enable when https://github.com/broadinstitute/gatk/issues/6649 is fixed.
        final String jvmVersionString = System.getProperty("java.version");
        // On Java 9+ the "1.x" version scheme is gone; Java 11 reports "11", "11.0.x", etc.
        if (jvmVersionString.startsWith("11")) {
            throw new SkipException("testLikelihoodsFromHaplotypesForAvailableImplementations on Java 11");
        }

        for (final VectorLoglessPairHMM.Implementation imp : VectorLoglessPairHMM.Implementation.values()) {
            PairHMM hmm;
            try {
                hmm = new VectorLoglessPairHMM(imp, args);
                //hmm.doNotUseTristateCorrection();
            } catch (final UserException.HardwareFeatureException e) {
                logger.warn(String.format("PairHMM implementation %s not available, skipping test...", imp.name()));
                continue;
            }

            BasicInputParser parser = null;
            try {
                parser = new BasicInputParser(true, new FileInputStream(pairHMMTestData));
            } catch (final FileNotFoundException e) {
                Assert.fail("PairHMM test data not found: " + pairHMMTestData);
            }

            while (parser.hasNext()) {
                final String[] tokens = parser.next();

                final Haplotype hap = new Haplotype(tokens[0].getBytes(), true);

                final byte[] bases = tokens[1].getBytes();
                final byte[] baseQuals = normalize(tokens[2].getBytes(), 6);
                final byte[] insertionQuals = normalize(tokens[3].getBytes());
                final byte[] deletionQuals = normalize(tokens[4].getBytes());
                final byte[] gcp = normalize(tokens[5].getBytes());
                final double expectedResult = Double.parseDouble(tokens[6]);

                final int readLength = bases.length;
                final GATKRead read = ArtificialReadUtils.createArtificialRead(bases, baseQuals, readLength + "M");
                ReadUtils.setInsertionBaseQualities(read, insertionQuals);
                ReadUtils.setDeletionBaseQualities(read, deletionQuals);

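                // Feed the gap penalties from the test data directly into the HMM,
                // rather than deriving them from the read's own quality annotations.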
                final PairHMMInputScoreImputator inputScoreImputator = (r_) ->
                    new PairHMMInputScoreImputation() {

                        @Override
                        public byte[] delOpenPenalties() {
                            return deletionQuals;
                        }

                        @Override
                        public byte[] insOpenPenalties() {
                            return insertionQuals;
                        }

                        @Override
                        public byte[] gapContinuationPenalties() {
                            return gcp;
                        }
                    };

                hmm.initialize(Arrays.asList(hap), null, 0, 0);
                hmm.computeLog10Likelihoods(matrix(Arrays.asList(hap)), Arrays.asList(read), inputScoreImputator);

                final double[] la = hmm.getLogLikelihoodArray();

                Assert.assertEquals(la[0], expectedResult, 1e-5,
                        String.format("Likelihood not in expected range for PairHMM implementation: %s.", imp.name()));
            }

            hmm.close();
        }
    }

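    // Converts Phred+33 (ASCII-encoded) quality scores in place to raw quality values,
    // clamping each value at the given minimum (0 for the single-argument overload).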
    private static byte[] normalize(byte[] scores) {
        return normalize(scores, 0);
    }

    private static byte[] normalize(byte[] scores, int min) {
        for (int i = 0; i < scores.length; i++) {
            scores[i] -= 33;
            scores[i] = scores[i] < min ? (byte) min : scores[i];
        }
        return scores;
    }

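    // Minimal LikelihoodMatrix stub backed by the haplotype list: only the methods this
    // test exercises are implemented; the rest throw UnsupportedOperationException.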
    private LikelihoodMatrix<GATKRead, Haplotype> matrix(final List<Haplotype> haplotypes) {
        return new LikelihoodMatrix<GATKRead, Haplotype>() {
            @Override
            public List<GATKRead> evidence() {
                throw new UnsupportedOperationException();
            }

            @Override
            public List<Haplotype> alleles() {
                return haplotypes;
            }

            @Override
            public void set(int alleleIndex, int evidenceIndex, double value) {
                // deliberately a no-op: the HMM writes likelihoods here, but this test
                // reads them back through getLogLikelihoodArray() instead
            }

            @Override
            public double get(int alleleIndex, int evidenceIndex) {
                throw new UnsupportedOperationException();
            }

            @Override
            public int indexOfAllele(Haplotype allele) {
                throw new UnsupportedOperationException();
            }

            @Override
            public int indexOfEvidence(GATKRead evidence) {
                throw new UnsupportedOperationException();
            }

            @Override
            public int numberOfAlleles() {
                return haplotypes.size();
            }

            @Override
            public int evidenceCount() {
                throw new UnsupportedOperationException();
            }

            @Override
            public Haplotype getAllele(int alleleIndex) {
                throw new UnsupportedOperationException();
            }

            @Override
            public GATKRead getEvidence(int evidenceIndex) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void copyAlleleLikelihoods(int alleleIndex, double[] dest, int offset) {
                throw new UnsupportedOperationException();
            }
        };
    }

}