1
2 #ifndef CGNL_HPP_
3 #define CGNL_HPP_
4
template<class R,class DJ>
R argmin(R rho,const DJ & dJ, KN_<R> &x,KN_<R> &h,KN_<R> &g,KN_<R> &w)
{
  // Line search: find ro such that (dJ(x + ro*h), h) = 0, i.e. the slope of J
  // along the direction h vanishes at x + ro*h.
  //
  // Parameters:
  //   rho : initial trial step
  //   dJ  : gradient operator; g = dJ*x evaluates the gradient at x
  //   x   : current iterate; updated in place to x + ro*h
  //   h   : search direction; reset to -g if it is not a descent direction
  //   g   : on entry must hold dJ(x); on exit holds dJ(x + ro*h)
  //   w   : work vector supplied by the caller; not referenced in this routine
  // Returns the accepted step ro.  Calls ExecError if no acceptable step is
  // found within 100 trial evaluations.
  int k=0;
  // g=dJ*x; // to be safe (kept disabled: the caller guarantees g == dJ(x))
  R ro0=0, ro=rho,ro1=rho,rold=0;
  // (g,h) is the dot product: p0 is the slope at ro = 0,
  // p1 the slope at the other end ro1 of the bracketing interval.
  R p0= (g,h),p,p1;
  if(p0>0) { h=-g; p0=(g,h);
  // Positive slope means h points uphill: restart with steepest descent.
  cout << "Reset searching directions to gradient! (Wow! says F. hecht) \n";
  }
  R ap0=fabs(p0)*0.01; // stop once the slope magnitude has been divided by 100

  // Move to the first trial point x + rho*h and evaluate the slope there.
  // (x is kept at x0 + rold*h throughout; each move only adds the increment.)
  x += (ro-rold)* h; rold=ro; g=dJ*x;// dJ(x,g);
  p= ( p1 = (g,h) );
  if ( verbosity >=50 )
  cout << " ro " << ro << " " << p
  << " rh0= 0 " << p0 << endl;


  // NOTE(review): loop is never set to false; the loop exits either through
  // the `return ro` below or when k reaches 100 (then ExecError is raised).
  bool loop=true;
  while (k++<100 && loop)
  { // compute the new current ro
  // Invariant: p0 = slope at ro0, p1 = slope at ro1.

  if (p0*p1 <0) { // sign change: a zero of the slope is bracketed in [ro0,ro1]
  // Secant (regula falsi) weight, clamped to [0.2, 0.8] so each step
  // shrinks the bracket by a guaranteed fraction (safeguarded bisection).
  R lambda= (-p0/(-p0+p1));
  if (lambda>0.8) lambda=0.8;
  if (lambda<0.2) lambda=0.2;
  ro = ro1*lambda + (1-lambda)*ro0 ;
  x += (ro-rold)* h; rold=ro; g=dJ*x;// dJ(x,g);
  assert(ro>1e-30 && ro < 1e+30);
  p = (g,h);
  if ( verbosity >=50 )
  cout << " " << ", rho=" << ro << " gh= " << p
  << "; ro0, gh0 = " << ro0 << " " << p0
  << "; ro1, gh1 = " << ro1 << " " << p1 << " " << lambda ;

  // Accept ro when the slope is small enough.  (k>100 here is effectively
  // redundant with the while condition; kept as written.)
  if(fabs(p) <= ap0 || k>100 ) {
  if ( verbosity >=50 )
  cout << endl << endl;
  return ro;
  }
  // Keep the sub-interval across which the slope still changes sign.
  if(p0*p<0) {
  p1=p;
  ro1=ro;
  if ( verbosity >=50 ) cout << " +\n";}
  else {
  p0=p;
  ro0=ro;
  if ( verbosity >=50 ) cout <<" -\n";}
  }
  else
  {
  // No sign change yet: double the step until the zero is bracketed.
  ro *=2;
  p0=p1;
  x += (ro-rold)* h; rold=ro; g=dJ*x;//dJ(x,g);
  p = (g,h);
  p1=p;
  ro1=ro;
  if ( verbosity >=50 ) cout <<p<<" " << ro << " 2* " ;
  }

  }
  ExecError("NLCG: ArgMin loop (convexe minimization? )");
  return 0;
}
72
73 template<class R,class DJ,class P,class S >
NLCG(const DJ & dJ,const P & C,KN_<R> & x,const int nbitermax,double & eps,long kprint=1000000000,S * Stop=0)74 int NLCG(const DJ & dJ,const P & C,KN_<R> &x,const int nbitermax, double &eps,long kprint=1000000000,S *Stop=0)
75 {
76 // -------------
77 // assert(&x && &dJ && &C);
78 typedef KN<R> Rn;
79 int n=x.N();
80
81 R ro=1;
82 Rn g(n),h(n),Ah(n), & Cg(Ah); // on utilise Ah pour stocke Cg
83 g=dJ*x;// dJ(x,g);
84 Cg = C*g; // gradient preconditionne
85 h =-Cg;
86 R g2 = (Cg,g);
87 if (g2 < 1e-30)
88 { if(kprint>1)
89 cout << "GCNL g^2 =" << g2 << " < 1.e-30 Nothing to do " << endl;
90 return 2; }
91 if (kprint>5 )
92 cout << " 0 GCNL g^2 =" << g2 << endl;
93 R reps2 =eps >0 ? eps*eps*g2 : -eps; // epsilon relatif
94 eps = reps2;
95 for (int iter=0;iter<=nbitermax;iter++)
96 {
97 ro = argmin(ro,dJ,x,h,g,Ah);
98
99 Cg = C*g;
100 R g2p=g2;
101 g2 = (Cg,g);
102 bool stop = Stop && Stop->Stop(iter,x,g);
103 if ( kprint >1 )
104 cout << "CGNL:" <<iter << ", ro = " << ro << " ||g||^2 = " << g2 << endl;
105 if (g2 < reps2 || stop) {
106 if (kprint )
107 cout << "CGNL converge: " << iter <<", ro = " << ro << " ||g||^2 = " << g2 << endl;
108 return 1;// ok
109 }
110 R gamma = g2/g2p;
111 h *= gamma;
112 h -= Cg; // h = -Cg * gamma* h
113 }
114 if(verbosity)
115 cout << " Non convergence of the NL cojugate gradient " <<endl;
116 return 0;
117 }
118
119 #endif //CGNL_HPP_
120