1 /**************************************************************
2 *** RHmm package
3 ***
4 *** File: cLogBaumWelch.cpp
5 ***
6 *** Author: Ollivier TARAMASCO <Ollivier.Taramasco@imag.fr>
7 *** Author: Sebastian BAUER <sebastian.bauer@charite.de>
8 ***
9 **************************************************************/
10
11 #include "StdAfxRHmm.h"
12
13
cLogBaumWelch(uint theNSample,uint * theT,uint theNClass)14 cLogBaumWelch::cLogBaumWelch(uint theNSample, uint* theT, uint theNClass)
15 { MESS_CREAT("cLogBaumWelch")
16 mvNSample = theNSample ;
17 if (mvNSample == 0)
18 { mvT = NULL ;
19 mLogVrais.Delete() ;
20 mLogAlpha = NULL ;
21 mLogBeta = NULL ;
22 mLogGamma = NULL ;
23 mLogXsi = NULL ;
24 mSumLogXsi = NULL ;
25 mLogRho = NULL ;
26 return ;
27 }
28 mvT = new uint[mvNSample] ;
29 mLogVrais.ReAlloc(mvNSample) ;
30
31 mLogAlpha = new cDMatrix[mvNSample] ;
32 mLogBeta = new cDMatrix[mvNSample] ;
33 mLogGamma = new cDMatrix[mvNSample] ;
34 mLogXsi = new cDMatrix*[mvNSample] ;
35 mSumLogXsi = new cDMatrix[mvNSample] ;
36 mLogRho = new cDVector[mvNSample] ;
37 for (register uint n = 0 ; n < mvNSample ; n++)
38 { mvT[n] = theT[n] ;
39 mLogAlpha[n].ReAlloc(theNClass, mvT[n]) ;
40 mLogBeta[n].ReAlloc(theNClass, mvT[n]) ;
41 mLogGamma[n].ReAlloc(theNClass, mvT[n]) ;
42 mLogXsi[n] = new cDMatrix[mvT[n]] ;
43 for (register uint t = 0 ; t < mvT[n] ; t++)
44 mLogXsi[n][t].ReAlloc(theNClass, theNClass) ;
45 mSumLogXsi[n].ReAlloc(theNClass, theNClass) ;
46 mLogRho[n].ReAlloc(mvT[n]) ;
47 }
48 }
49
cLogBaumWelch(const cInParam & theInParam)50 cLogBaumWelch::cLogBaumWelch(const cInParam &theInParam)
51 { MESS_CREAT("cLogBaumWelch")
52 mvNSample = theInParam.mNSample ;
53 if (mvNSample == 0)
54 { mvT = NULL ;
55 mLogVrais.Delete() ;
56 mLogAlpha = NULL ;
57 mLogBeta = NULL ;
58 mLogGamma = NULL ;
59 mLogXsi = NULL ;
60 mLogRho = NULL ;
61 return ;
62 }
63 mvT = new uint[mvNSample] ;
64 mLogVrais.ReAlloc(mvNSample) ;
65
66 mLogAlpha = new cDMatrix[mvNSample] ;
67 mLogBeta = new cDMatrix[mvNSample] ;
68 mLogGamma = new cDMatrix[mvNSample] ;
69 mLogXsi = new cDMatrix*[mvNSample] ;
70 mSumLogXsi = new cDMatrix[mvNSample] ;
71 mLogRho = new cDVector[mvNSample] ;
72 for (register uint n = 0 ; n < mvNSample ; n++)
73 { mvT[n] = (theInParam.mY[n].mSize)/theInParam.mDimObs ;
74 mLogAlpha[n].ReAlloc(theInParam.mNClass, mvT[n]) ;
75 mLogBeta[n].ReAlloc(theInParam.mNClass, mvT[n]) ;
76 mLogGamma[n].ReAlloc(theInParam.mNClass, mvT[n]) ;
77 mLogXsi[n] = new cDMatrix[mvT[n]] ;
78 for (register uint t=0 ; t < mvT[n] ; t++)
79 mLogXsi[n][t].ReAlloc(theInParam.mNClass, theInParam.mNClass) ;
80 mSumLogXsi[n].ReAlloc(theInParam.mNClass, theInParam.mNClass) ;
81 mLogRho[n].ReAlloc(mvT[n]) ;
82 }
83 }
84
~cLogBaumWelch()85 cLogBaumWelch::~cLogBaumWelch()
86 { MESS_DESTR("cLogBaumWelch")
87 if (mvNSample > 0)
88 { for (register uint n = 0 ; n < mvNSample ; n++)
89 { mLogAlpha[n].Delete() ;
90 mLogBeta[n].Delete() ;
91 mLogGamma[n].Delete() ;
92 for (register uint t = 0 ; t < mvT[n] ; t++)
93 mLogXsi[n][t].Delete() ;
94 delete [] mLogXsi[n] ;
95 mSumLogXsi[n].Delete() ;
96 mLogRho[n].Delete() ;
97 }
98 delete [] mvT ;
99 delete [] mLogRho ;
100 delete [] mLogXsi ;
101 delete [] mSumLogXsi ;
102 delete [] mLogGamma ;
103 delete [] mLogBeta ;
104 delete [] mLogAlpha ;
105 }
106 }
107
/*
 * Log-space forward-backward pass over every observation sequence.
 *
 * theCondProba - theCondProba[n][j][t] is read as the conditional density
 *                of observation t of sample n under state j (presumably
 *                precomputed by the caller - TODO confirm).
 * theHMM       - model parameters: mInitProba (initial distribution,
 *                whose size defines the number of states here) and
 *                mTransMatVector (one transition matrix per date).
 *
 * Fills, for each sample n: mLogAlpha, mLogBeta, mLogGamma, mLogXsi,
 * mSumLogXsi, mLogRho and the likelihood mLogVrais[n]. The recursions use
 * the extended-logarithm helpers eln/elnsum/elnproduct/eexp so that zero
 * probabilities (LOGZERO) propagate without numerical underflow.
 *
 * NOTE(review): in the "fin" section every "Log..." member except
 * mLogVrais is mapped back through eexp(), so on exit those members hold
 * plain (non-log) probabilities despite their names.
 */
void cLogBaumWelch::LogForwardBackward(cDMatrix* theCondProba, cHmm& theHMM)
{
	register uint i,
	              j ;
	register int t ;
	double myLogAlpha,
	       myAux,
	       mySum ;
	uint myNClass = theHMM.mInitProba.mSize ;

	for (register uint n = 0 ; n < mvNSample ; n++)
	{
		int myT = (int)mvT[n] ;

		/* Initialisation: logAlpha(i,0) = ln(pi_i) + ln(b_i(y_0)),
		 * logRho(0) accumulates the log-sum over states. */
		mLogRho[n][0] = LOGZERO ;

		for (i = 0 ; i < myNClass ; i++)
		{
			mLogAlpha[n][i][0] = elnproduct(eln(theHMM.mInitProba[i]), eln(theCondProba[n][i][0])) ;
			mLogRho[n][0] = elnsum(mLogRho[n][0], mLogAlpha[n][i][0]) ;
		}

		mLogVrais[n] = mLogRho[n][0] ;
		//forward
		/* logAlpha(j,t+1) = ln( sum_i alpha(i,t) * A[t](i,j) ) + ln(b_j(y_{t+1})) ;
		 * logRho(t) is the log-sum of alpha over states at date t. */
		for (t = 0 ; t < myT-1 ; t++)
		{
			mLogRho[n][t+1] = LOGZERO ;
			for (j = 0 ; j < myNClass ; j++)
			{
				myLogAlpha = LOGZERO ;
				for (i = 0 ; i < myNClass ; i++)
					myLogAlpha = elnsum(myLogAlpha, elnproduct(mLogAlpha[n][i][t], eln(theHMM.mTransMatVector[t][i][j]))) ;

				mLogAlpha[n][j][t+1] = elnproduct(myLogAlpha, eln(theCondProba[n][j][t+1])) ;
				mLogRho[n][t+1] = elnsum(mLogRho[n][t+1], mLogAlpha[n][j][t+1]) ;
			}
		}

		// backward
		/* Terminal condition: beta(i,T-1) = 1 for every state. */
		for (i = 0 ; i < myNClass ; i++)
			mLogBeta[n][i][myT-1] = 0.0 ; // log(1)

		/* logBeta(i,t) = ln( sum_j A(i,j) * b_j(y_{t+1}) * beta(j,t+1) ). */
		for (t = myT-2 ; t >= 0 ; t--)
		{
			for (i = 0 ; i < myNClass ; i++)
			{	myAux = LOGZERO ;
				for (j = 0 ; j < myNClass ; j++)
					myAux = elnsum(myAux, elnproduct(eln(theHMM.mTransMatVector[t+1][i][j]), elnproduct(eln(theCondProba[n][j][t+1]), mLogBeta[n][j][t+1]))) ; // FIXME: mTransMatVector just t instead of t+1? (the forward loop above indexes the transition matrix with t)
				mLogBeta[n][i][t] = myAux ;
			}
		}

		// Computation of the Gammas and of LogVrais
		// error ? mLogVrais[n] = LOGZERO ;
		/* With unnormalized log-alphas, logRho(T-1) = ln P(y_0..y_{T-1}),
		 * i.e. the log-likelihood of sample n. */
		mLogVrais[n] = mLogRho[n][myT-1] ;
		/* gamma(i,t) = alpha(i,t)*beta(i,t) normalized over states
		 * (normalization done in log space by subtracting the log-sum). */
		for (t = 0 ; t < myT ; t++)
		{	mySum = LOGZERO ;
			for (i = 0 ; i < myNClass ; i++)
			{	mLogGamma[n][i][t] = elnproduct(mLogAlpha[n][i][t], mLogBeta[n][i][t]) ;
				mySum = elnsum(mySum, mLogGamma[n][i][t]) ;
			}
			for (i = 0 ; i < myNClass ; i++)
				mLogGamma[n][i][t] = elnproduct(mLogGamma[n][i][t], -mySum) ;

			// Error mLogVrais[n] = elnsum(mLogVrais[n], mLogRho[n][t]) ;
		}

		// Computation of the Xsis
		/* xsi(i,j,t) ~ alpha(i,t) * A(i,j) * b_j(y_{t+1}) * beta(j,t+1);
		 * mSumLogXsi accumulates the log-sum over t. */
		for (i = 0 ; i < myNClass ; i++)
			for (j = 0 ; j < myNClass ; j++)
			{	mSumLogXsi[n][i][j] = LOGZERO ;
				for (t = 0 ; t < myT - 1 ; t++)
				{	/* NOTE(review): unlike every other recursion here, the transition
					 * probability and the conditional probability are NOT wrapped in
					 * eln(), and the transition matrix is indexed with [0] rather than
					 * [t] - both look suspicious; confirm against the non-log
					 * Baum-Welch implementation before changing. */
					mLogXsi[n][t][i][j] = elnproduct(mLogAlpha[n][i][t], elnproduct(theHMM.mTransMatVector[0][i][j], elnproduct(theCondProba[n][j][t+1], mLogBeta[n][j][t+1]))) ;/* FIXME */
					mSumLogXsi[n][i][j] = elnsum(mSumLogXsi[n][i][j],mLogXsi[n][t][i][j]) ;
				}
			}

		// fin
		/* Map every quantity back from log space to ordinary probabilities. */
		for (i = 0 ; i < myNClass ; i++)
		{	for (t = 0 ; t < myT ; t++)
			{	mLogAlpha[n][i][t] = eexp(mLogAlpha[n][i][t]) ;
				mLogBeta[n][i][t] = eexp(mLogBeta[n][i][t]) ;
				mLogGamma[n][i][t] = eexp(mLogGamma[n][i][t]) ;
			}
			for (j = 0 ; j < myNClass ; j++)
			{	mSumLogXsi[n][i][j] = eexp(mSumLogXsi[n][i][j]) ;
				for (t = 0 ; t < myT - 1 ; t++)
					mLogXsi[n][t][i][j] = eexp(mLogXsi[n][t][i][j]) ;
			}
		}
		for (t = 0 ; t < myT ; t++)
			mLogRho[n][t] = eexp(mLogRho[n][t]) ;

	}
}
203