cg.cpp

//============================================================
// COOOL           version 1.1           ---     Nov,  1995
//   Center for Wave Phenomena, Colorado School of Mines
//============================================================
//
//   This code is part of a preliminary release of COOOL (CWP
// Object-Oriented Optimization Library) and associated class
// libraries.
//
// The COOOL library is free software. You can do anything you want
// with it, including make a fortune. However, neither the authors,
// the Center for Wave Phenomena, nor anyone else you can think of
// makes any guarantees about anything in this package or any aspect
// of its functionality.
//
// Since you've got the source code, you can also modify the
// library to suit your own purposes. We would appreciate it
// if the headers that identify the authors are kept in the
// source code.
//
//=============================
// Definition of the conjugate gradient class
// Non-linear conjugate gradient algorithm
// author:   Wenceslau Gouveia
// modified: H. Lydia Deng, 02/23/94, 03/14/94
//=============================

#include "CG.hh"
#include "defs.hh"

static const char* myNameIs = "Conjugate Gradient";

const char* ConjugateGradient::className() const
{
    return myNameIs;
}

// Construct from a line-search object, an iteration limit, and a
// tolerance on the gradient norm.
ConjugateGradient::ConjugateGradient(LineSearch* p, int it, double eps)
: LineSearchOptima(p)
{
    iterMax = it;
    tol     = eps;
    iterNum = 0;
}

// As above, with an explicit verbosity flag.
ConjugateGradient::ConjugateGradient(LineSearch* p, int it, double eps, int verb)
: LineSearchOptima(p, verb)
{
    iterMax = it;
    tol     = eps;
    iterNum = 0;
}

// Minimize starting from model0 with the non-linear conjugate
// gradient method (Polak-Ribiere updates).  Note that model0 is
// passed by reference and is overwritten during the iteration.
Model<double> ConjugateGradient::optimizer(Model<double>& model0)
{
    // Reset the residue history for every new optimization run.
    // Deleting a NULL pointer is harmless, so allocate unconditionally
    // (the original only reallocated when residue was non-NULL).
    iterNum   = 0;
    isSuccess = 0;
    delete residue;
    residue = new List<double>;

    int n = model0.modSize();
    Model<double>  model1(model0);  // new model
    Vector<double> search(n);       // search direction
    Vector<double> g0(n);           // old gradient vector
    Vector<double> g1(n);           // new gradient vector
    double         beta;            // Polak-Ribiere parameter
    double         lambda = .025;   // line search parameter
    double         descent = 0.;    // descent direction
    // Beginning iterations: gradient at the starting model.
    g0 = ls->gradient(model0);

    // Check the gradient, in case the initial model is already the
    // optimum.  (Lydia, 03/08/95)
    double err = (double)sqrt(g0 * g0);
    if (isVerbose) cerr << "Initial residue: " << err << endl;
    NonQuadraticOptima::appendResidue(err);       // record residual
    if (err < tol)
    {
        if (isVerbose) cerr << "Initial guess was great!\n";
        isSuccess = 1;
        return model0;
    }

    // First step: steepest descent.
    search  = -1. * g0;
    descent = search * g0;

    model1 = ls->search(model0, search, descent, lambda);
    g1 = ls->gradient(model1);                    // gradient at the new model
    err = (double)sqrt(g1 * g1);
    if (isVerbose)
        cerr << "Iteration (0): current value of the objective function: "
             << ls->currentValue() << "\t current residue: " << err << endl;
    NonQuadraticOptima::appendResidue(err);       // record residual
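
    // Explanatory note: each pass of the loop below applies the
    // Polak-Ribiere update
    //
    //     beta   = g1 . (g1 - g0) / (g0 . g0)
    //     search = beta * search - g1
    //
    // and restarts with plain steepest descent (search = -g1) whenever
    // the updated direction fails to be a descent direction
    // (search . g1 > 0).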

    iterNum = 0;
    double temp;
    do
    {
        iterNum++;

        temp  = 1. / (g0 * g0);
        beta  = (g1 - g0) * g1;         // Polak & Ribiere computation
        beta *= temp;

        search = beta * search - g1;    // new search direction

        descent = search * g1;
        if (descent > 0.)               // not a descent direction:
        {                               // fall back to the gradient
            if (isVerbose)
                cerr << "Reset searching directions to gradient!\n";

            search  = -g1;
            descent = search * g1;
        }

        // Save the old model and gradient before the new search.
        model0 = model1;
        g0 = g1;

        model1 = ls->search(model0, search, descent, lambda);   // line search
        g1 = ls->gradient(model1);

        err = (double)sqrt(g1 * g1);
        if (isVerbose)
            cerr << "Iteration (" << iterNum << "): current value of the objective function: "
                 << ls->currentValue() << "\t current residue: " << err << endl;
        NonQuadraticOptima::appendResidue(err);                 // record residual

    } while (residue->last() > tol && iterNum < iterMax);       // stopping criterion

    if (residue->last() <= tol) isSuccess = 1;

    return model1;                      // hopefully the answer
}

// Integer-model overload: converts to double precision, runs the
// double-precision optimizer, and converts the result back.
Model<long> ConjugateGradient::optimizer(Model<long>& model0)
{
    Model<double> temp(model0);
    temp = optimizer(temp);
    Model<long> m(temp);
    return m;
}
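
A minimal usage sketch, not part of the original file: it assumes some
concrete LineSearch subclass (the name MyLineSearch below is made up)
and a way to build the starting Model<double>; only the
ConjugateGradient interface visible above is taken as given.

    LineSearch* ls = new MyLineSearch(/* objective function, ... */);

    // At most 200 iterations; stop once the gradient norm drops
    // below 1e-6; the last argument turns on verbose reporting.
    ConjugateGradient cg(ls, 200, 1.e-6, 1);

    Model<double> m0 = /* starting model */;
    Model<double> best = cg.optimizer(m0);   // note: m0 is overwritten

    delete ls;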
