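// CG.cc -- nonlinear conjugate-gradient minimization of an objective
// function, using a LineSearch object for gradient and step-length
// computations (declared in CG.hh).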
#include "CG.hh"
#include "defs.hh"


static const char* myNameIs = "Conjugate Gradient";

const char* ConjugateGradient::className() const {
    return (myNameIs);
}

ConjugateGradient::ConjugateGradient(LineSearch* p, int it, double eps)
    : LineSearchOptima(p)
{
    iterMax = it;
    tol = eps;
    iterNum = 0;
}

ConjugateGradient::ConjugateGradient(LineSearch* p, int it, double eps, int verb)
    : LineSearchOptima(p, verb)
{
    iterMax = it;
    tol = eps;
    iterNum = 0;
}

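// Minimize the objective starting from model0 with the Polak-Ribiere
// nonlinear conjugate-gradient method:
//
//   beta_k  = ((g_{k+1} - g_k) . g_{k+1}) / (g_k . g_k)
//   d_{k+1} = beta_k * d_k - g_{k+1}
//
// where g is the gradient supplied by the LineSearch object and d is the
// search direction. Iteration stops when the gradient norm falls below
// tol or iterMax iterations have been performed.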
Model<double> ConjugateGradient::optimizer(Model<double>& model0)
{
    iterNum = 0;
    isSuccess = 0;

    // Reset the residue history from any previous run.
    if (residue != NULL)
    {
        delete residue;
        residue = new List<double>;
    }

    int n = model0.modSize();
    Model<double> model1(model0);
    Vector<double> search(n);   // current search direction
    Vector<double> g0(n);       // gradient at the previous model
    Vector<double> g1(n);       // gradient at the current model
    double beta;
    double lambda = .025;       // initial step-length guess for the line search
    double descent = 0.;

    // Gradient at the starting model.
    g0 = ls->gradient(model0);

    double err = (double)sqrt(g0*g0);
    if (isVerbose) cerr << "Initial residue : " << err << endl;
    NonQuadraticOptima::appendResidue(err);
    if (err < tol) {
        if (isVerbose) cerr << "Initial guess was good enough!\n";
        isSuccess = 1;
        return model0;
    }

    // First step: steepest descent along the negative gradient.
    search = -1. * g0;
    descent = search * g0;

    model1 = ls->search(model0, search, descent, lambda);
    g1 = ls->gradient(model1);
    err = (double)sqrt(g1*g1);
    if (isVerbose)
        cerr << "Iteration (0) : " << "current value of the objective function: "
             << ls->currentValue() << "\t current residue: " << err << endl;
    NonQuadraticOptima::appendResidue(err);

    iterNum = 0;
    double temp;
    do
    {
        iterNum++;

        // Polak-Ribiere update: beta = ((g1 - g0) . g1) / (g0 . g0).
        temp = 1. / (g0*g0);
        beta = (g1 - g0) * g1;
        beta *= temp;

        // New conjugate search direction.
        search = beta * search - g1;

        // If the new direction is not a descent direction, fall back to
        // steepest descent.
        descent = search * g1;
        if (descent > 0.)
        {
            if (isVerbose)
                cerr << "Reset search direction to the negative gradient!\n";

            search = -g1;
            descent = search * g1;
        }

        model0 = model1;
        g0 = g1;

        model1 = ls->search(model0, search, descent, lambda);
        g1 = ls->gradient(model1);

        err = (double)sqrt(g1*g1);
        if (isVerbose)
            cerr << "Iteration (" << iterNum << ") : " << "current value of the objective function: "
                 << ls->currentValue() << "\t current residue: " << err << endl;
        NonQuadraticOptima::appendResidue(err);

    } while (residue->last() > tol && iterNum < iterMax);

    if (residue->last() <= tol) isSuccess = 1;

    return (model1);
}

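// Integer-model overload: convert to a double-precision model, run the
// double-precision optimizer above, and convert the result back to long.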
Model<long> ConjugateGradient::optimizer(Model<long>& model0)
{
    Model<double> temp(model0);
    temp = optimizer(temp);
    Model<long> m(temp);
    return m;
}
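
// A minimal, hypothetical usage sketch (driver code is not part of this
// file; the concrete LineSearch subclass and Model construction depend on
// the rest of the library):
//
//   LineSearch* ls = /* some concrete LineSearch for the problem */;
//   ConjugateGradient cg(ls, /* iterMax */ 200, /* tol */ 1e-6, /* verbose */ 1);
//   Model<double> start = /* initial model */;
//   Model<double> best = cg.optimizer(start);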