NewtonMethod.cpp

Go to the documentation of this file.
00001 /******************************************************************************/
00002 /*                                                                            */
00003 /*   N E W T O N   M E T H O D   C L A S S                                    */
00004 /*                                                                            */
00005 /*   Roberto Lopez                                                            */
00006 /*   International Center for Numerical Methods in Engineering (CIMNE)        */
00007 /*   Technical University of Catalonia (UPC)                                  */
00008 /*   Barcelona, Spain                                                         */
00009 /*   E-mail: rlopez@cimne.upc.edu                                             */
00010 /*                                                                            */
00011 /******************************************************************************/
00012 
00013 #include "NewtonMethod.h"
00014 
00015 #include <iostream>
00016 #include <fstream>
00017 #include <stdlib.h>
00018 #include <math.h>
00019 #include <time.h>
00020 
00021 namespace Purple
00022 {
00023 
00024 // GENERAL CONSTRUCTOR 
00025 
00026 /// General constructor. It creates a Newton's method object
00027 /// associated to an objective function object.
00028 /// It also initializes the class members to their default values:
00029 ///
00030 /// Initial argument: Random point within the objective function domain.
00031 ///
00032 /// Stopping criteria:
00033 /// <ul> 
00034 /// <li> Evaluation goal: -1.0e69.
00035 /// <li> Gradient norm goal: 0.0.
00036 /// <li> Maximum time: 1.0e6.
00037 /// <li> Maximum number of iterations: 1000. 
00038 /// </ul> 
00039 ///  
00040 /// User stuff: 
00041 /// <ul>
00042 /// <li> Show period: 25. 
00043 /// </ul>
00044 ///
00045 /// @param newObjectiveFunction: Pointer to an objective function object.
00046 ///
00047 /// @see ObjectiveFunction.
00048 /// @see OptimizationAlgorithm.
00049 
00050 NewtonMethod::NewtonMethod(ObjectiveFunction* newObjectiveFunction)
00051 : OptimizationAlgorithm(newObjectiveFunction)
00052 {
00053    // Optimization parameters
00054 
00055    int numberOfVariables = objectiveFunction->getNumberOfVariables();
00056 
00057    Vector<double> lowerBound = objectiveFunction->getLowerBound();
00058    Vector<double> upperBound = objectiveFunction->getUpperBound();
00059 
00060    Vector<double> newInitialArgument(numberOfVariables, 0.0);
00061 
00062    for(int i = 0; i < numberOfVariables; i++)
00063    {
00064       double random = (double)rand()/(RAND_MAX+1.0);
00065 
00066       newInitialArgument[i] 
00067       = lowerBound[i] + (upperBound[i] - lowerBound[i])*random;
00068    }
00069 
00070    initialArgument = newInitialArgument;
00071 
00072    // Stopping criteria
00073 
00074    evaluationGoal = 0.0;
00075    gradientNormGoal = 0.0;
00076    maximumTime = 1.0e6;
00077    maximumNumberOfIterations = 100;
00078 
00079    // User stuff
00080 
00081    showPeriod = 25;
00082 }
00083 
00084 
00085 // DEFAULT CONSTRUCTOR
00086 
00087 /// Default constructor. It creates a Newton's method optimization algorithm object
00088 /// not associated to any objective function object.
00089 /// It also initializes the class members to their default values:
00090 ///
00091 /// Stopping criteria:
00092 /// <ul> 
00093 /// <li> Evaluation goal: -1.0e69.
00094 /// <li> Gradient norm goal: 0.0.
00095 /// <li> Maximum time: 1.0e6.
00096 /// <li> Maximum number of iterations: 1000. 
00097 /// </ul> 
00098 ///  
00099 /// User stuff: 
00100 /// <ul>
00101 /// <li> Show period: 25. 
00102 /// </ul>
00103 ///
00104 /// @see OptimizationAlgorithm.
00105 
00106 NewtonMethod::NewtonMethod(void) : OptimizationAlgorithm()
00107 {
00108    // Stopping criteria
00109 
00110    evaluationGoal = 0.0;
00111    gradientNormGoal = 0.0;
00112    maximumTime = 1.0e6;
00113    maximumNumberOfIterations = 100;
00114 
00115    // User stuff
00116 
00117    showPeriod = 25;
00118 }
00119 
00120 
00121 // DESTRUCTOR
00122 
00123 /// Destructor.
00124 
NewtonMethod::~NewtonMethod(void)
{
   // Nothing to release: the class allocates no dynamic resources and
   // the objective function pointer is not deleted here.
}
00129 
00130 
00131 // METHODS
00132 
00133 
00134 // Vector<double> getInitialArgument(void)
00135 
00136 /// This method returns the initial objective function argument to be used by 
00137 /// the Newton's method for optimization. 
00138 
00139 Vector<double> NewtonMethod::getInitialArgument(void)
00140 {
00141    return(initialArgument);
00142 }
00143 
00144 
00145 // double getGradientNormGoal(void) method
00146 
00147 /// This method returns the goal value for the norm of the objective function
00148 /// gradient.
00149 /// This is used as a stopping criterium when optimizing a function.
00150 
00151 double NewtonMethod::getGradientNormGoal(void)
00152 {
00153    return(gradientNormGoal);
00154 }
00155 
00156 
00157 // int getMaximumNumberOfIterations(void) method
00158 
00159 /// This method returns the maximum number of iterations to be performed by the 
00160 /// Newton's method during the optimization process. 
00161 /// This is used as a stopping criterium when optimizing an objective function.
00162 
00163 int NewtonMethod::getMaximumNumberOfIterations(void)
00164 {
00165    return(maximumNumberOfIterations);
00166 }
00167 
00168 
00169 // int getShowPeriod(void) method
00170 
00171 /// This method returns the number of iterations between the optimization 
00172 /// showing progress. 
00173 
00174 int NewtonMethod::getShowPeriod(void)
00175 {
00176    return(showPeriod);    
00177 }
00178 
00179 
00180 // void setInitialArgument(Vector<double>) method
00181 
00182 /// This method sets a new initial objective function argument to be used by 
00183 /// the Newton's method for optimization. 
00184 ///
00185 /// @param newInitialArgument: Initial argument Vector.
00186 
00187 void NewtonMethod::setInitialArgument(Vector<double> newInitialArgument)
00188 {
00189    int size = newInitialArgument.getSize();
00190 
00191    int numberOfVariables = objectiveFunction->getNumberOfVariables();
00192 
00193    if(size != numberOfVariables)
00194    {
00195       std::cout << std::endl
00196                 << "Error: NewtonMethod class. "
00197                 << "double setInitialArgument(Vector<double>) method." << std::endl
00198                 << "Size of initial argument must be equal to number of variables."
00199                 << std::endl << std::endl;
00200 
00201       exit(1);
00202    }
00203 
00204 
00205    initialArgument = newInitialArgument;
00206 }
00207 
00208 
00209 // void setGradientNormGoal(double) method
00210 
00211 /// This method sets a new goal value for the norm of the 
00212 /// objective function gradient. 
00213 /// This is used as a stopping criterium when optimizing an objective function.
00214 ///
00215 /// @param newGradientNormGoal: 
00216 /// Goal value for the norm of the objective function gradient.
00217 
00218 void NewtonMethod::setGradientNormGoal(double newGradientNormGoal)
00219 {
00220    if(gradientNormGoal < 0.0)
00221    {
00222       std::cout << std::endl
00223                 << "Error: NewtonMethod class." << std::endl
00224                 << "void setGradientNormGoal(double) method."
00225                 << std::endl
00226                 << "Gradient norm goal must be equal or greater than 0."
00227                 << std::endl << std::endl;
00228       exit(1);
00229    }
00230 
00231    // Set gradient norm goal
00232 
00233    gradientNormGoal = newGradientNormGoal;
00234 }
00235 
00236 
00237 // void setMaximumNumberOfIterations(int) method
00238 
00239 /// This method sets a new maximum number of iterations in the optimization 
00240 /// process. 
00241 ///
00242 /// @param newMaximumNumberOfIterations: Maximum number of iterations.
00243 
00244 void NewtonMethod
00245 ::setMaximumNumberOfIterations(int newMaximumNumberOfIterations)
00246 {
00247    if(newMaximumNumberOfIterations <= 0)
00248    {
00249       std::cout << std::endl
00250                 << "Error: NewtonMethod class." << std::endl
00251                 << "void setMaximumNumberOfIterations(int) method."
00252                 << std::endl
00253                 << "Maximum number of iterations must be greater than 0."
00254                 << std::endl
00255                 << std::endl;
00256 
00257       exit(1);
00258    }
00259 
00260    // Set maximum number of iterations
00261 
00262    maximumNumberOfIterations = newMaximumNumberOfIterations;
00263 }
00264 
00265 
00266 
00267 // void setShowPeriod(int) method
00268 
00269 /// This method sets a new number of iterations between the optimization
00270 /// showing progress. 
00271 ///
00272 /// @param newShowPeriod: Show period.
00273 
00274 void NewtonMethod::setShowPeriod(int newShowPeriod)
00275 {
00276    if(newShowPeriod <= 0)
00277    {
00278       std::cout << std::endl
00279                 << "Error: NewtonMethod class." << std::endl
00280                 << "void setShowPeriod(int) method."
00281                 << std::endl
00282                 << "Show period must be greater than 0."
00283                 << std::endl << std::endl;
00284 
00285       exit(1);
00286    }
00287 
00288    // Set show period
00289 
00290    showPeriod = newShowPeriod;
00291 }
00292 
00293 
00294 // void getMinimalArgument(void) method
00295 
00296 /// This method optimizes an objective function according to the 
00297 /// Newton's method. 
00298 /// It returns the minimal argument of the objective function.
00299 /// Optimization occurs according to the optimization parameters. 
00300 
00301 Vector<double> NewtonMethod::getMinimalArgument(void)
00302 {
00303    int numberOfVariables = objectiveFunction->getNumberOfVariables();
00304 
00305    Vector<double> minimalArgument(numberOfVariables, 0.0);
00306    Vector<double> argument(numberOfVariables, 0.0);
00307 
00308    // Evaluation history vector
00309 
00310    Vector<double> newEvaluationHistory(maximumNumberOfIterations+1, 0.0);
00311 
00312    evaluationHistory = newEvaluationHistory;
00313 
00314    // Gradient norm optimization history vector
00315 
00316    Vector<double> newGradientNormHistory(maximumNumberOfIterations+1, 0.0);
00317 
00318    gradientNormHistory = newGradientNormHistory;
00319 
00320    double evaluation = 0.0;
00321 
00322    Vector<double> gradient(numberOfVariables, 0.0);
00323 
00324    double gradientNorm = 0.0;
00325 
00326    Matrix<double> hessian(numberOfVariables, numberOfVariables, 0.0);
00327    Matrix<double> inverseHessian(numberOfVariables, numberOfVariables, 0.0);
00328    Vector<double> inverseHessianGradientProduct(numberOfVariables, 0.0);
00329 
00330    time_t beginningTime, currentTime;
00331 
00332    double elapsedTime = 0.0;
00333 
00334    // Set beginning time 
00335 
00336    time(&beginningTime);
00337 
00338    // Main loop
00339 
00340    std::cout << std::endl
00341              << "Getting minimal argument with Newton's method..." 
00342              << std::endl;
00343 
00344    argument = initialArgument;
00345     
00346    // Initial objective function evaluation
00347    
00348    evaluation = objectiveFunction->getEvaluation(argument);
00349 
00350    evaluationHistory[0] = evaluation;
00351 
00352    if(evaluation <= evaluationGoal)
00353    {          
00354       std::cout << std::endl
00355                 << "Initial evaluation is less than goal." << std::endl
00356                 << "Initial evaluation: " << evaluation << std::endl;
00357       
00358       minimalArgument = argument;
00359 
00360       // Print minimal argument to screen
00361 
00362       std::cout << std::endl
00363                 << "Minimal argument:" << std::endl;
00364    
00365       for(int i = 0; i < numberOfVariables; i++)
00366       {
00367          std::cout << minimalArgument[i] << " ";        
00368       }
00369       
00370       return(minimalArgument);        
00371    }
00372    else
00373    {
00374       std::cout << "Initial evaluation: " <<  evaluation << std::endl;      
00375    }
00376 
00377    // Initial objective function gradient
00378 
00379    gradient = objectiveFunction->getGradient(argument);
00380 
00381    gradientNorm = objectiveFunction->getGradientNorm(gradient);
00382 
00383    gradientNormHistory[0] = gradientNorm;
00384 
00385    if(gradientNorm <= gradientNormGoal)
00386    {          
00387       std::cout << std::endl
00388                 << "Initial gradient norm is less than goal." << std::endl
00389                 << "Initial gradient norm: " << gradientNorm << std::endl;
00390               
00391       minimalArgument = argument;
00392      
00393       // Print minimal argument to screen
00394 
00395       std::cout << std::endl
00396                 << "Minimal argument:" << std::endl;
00397    
00398       for(int i = 0; i < numberOfVariables; i++)
00399       {
00400          std::cout << minimalArgument[i] << " ";        
00401       }
00402       
00403       return(minimalArgument);        
00404    }
00405    else
00406    {
00407       std::cout << "Initial gradient norm: " <<  gradientNorm << std::endl;      
00408    }
00409 
00410    // Loop over iterations
00411 
00412    for(int iteration = 1; iteration <= maximumNumberOfIterations; iteration++)
00413    {
00414       // Objective function Hessian
00415 
00416       inverseHessian = objectiveFunction->getInverseHessian(argument);
00417 
00418       // Inverse Hessian - gradient product
00419 
00420       for(int i = 0; i < numberOfVariables; i++)
00421       {
00422          for(int j = 0; j < numberOfVariables; j++)
00423          {
00424             inverseHessianGradientProduct[i] += inverseHessian[i][j]*gradient[j];
00425          }
00426       }
00427 
00428       // Get new argument
00429 
00430       for (int i = 0; i < numberOfVariables; i++)
00431       {
00432          argument[i] -= inverseHessianGradientProduct[i];
00433       }
00434       
00435       
00436       // Objective function evaluation
00437    
00438       evaluation = objectiveFunction->getEvaluation(argument);
00439 
00440       evaluationHistory[iteration] = evaluation;
00441 
00442       // Objective function gradient
00443 
00444       gradient = objectiveFunction->getGradient(argument);
00445 
00446       gradientNorm = objectiveFunction->getGradientNorm(gradient);
00447 
00448       gradientNormHistory[iteration] = gradientNorm;
00449       
00450 
00451       // Stopping Criteria
00452 
00453       // Evaluation goal 
00454 
00455       if (evaluation <= evaluationGoal)
00456       {
00457          std::cout << std::endl
00458                    << "Iteration " << iteration << ": "
00459                    << "Evaluation goal reached." << std::endl;
00460 
00461          std::cout << "Evaluation: " << evaluation  << std::endl;
00462          std::cout << "Gradient norm: " << gradientNorm << std::endl;
00463 
00464          break;
00465       }
00466 
00467       // Norm of objective function gradient goal 
00468 
00469       if (gradientNorm <= gradientNormGoal)
00470       {
00471          std::cout << std::endl
00472                    << "Iteration " << iteration << ": "
00473                    << "Gradient norm goal reached."
00474                    << std::endl;  
00475 
00476          std::cout << "Evaluation: " << evaluation << ";" << std::endl;
00477          std::cout << "Gradient norm: " << gradientNorm << ";" << std::endl;
00478 
00479          break;
00480       }
00481 
00482       // Maximum optimization time
00483 
00484       time(&currentTime);
00485 
00486       elapsedTime = difftime(currentTime, beginningTime);
00487 
00488       if (elapsedTime >= maximumTime)
00489       {
00490          std::cout << std::endl
00491                    << "Iteration " << iteration << ": "
00492                    << "Maximum optimization time reached."
00493                    << std::endl;
00494 
00495          std::cout << "Evaluation: " << evaluation << ";" << std::endl;
00496          std::cout << "Gradient norm: " << gradientNorm << ";" << std::endl;
00497 
00498          break;
00499       }
00500 
00501       // Maximum number of iterations
00502 
00503       if (iteration == maximumNumberOfIterations)
00504       {
00505          std::cout << std::endl
00506                    << "Iteration " << iteration << ": "
00507                    << "Maximum number of iterations reached."
00508                    << std::endl;
00509 
00510          std::cout << "Evaluation: " << evaluation << std::endl;
00511          std::cout << "Gradient norm: " << gradientNorm << std::endl;
00512 
00513          break;
00514       }
00515 
00516       // Progress
00517 
00518       if(iteration % showPeriod == 0)
00519       {
00520          std::cout << std::endl
00521                    << "Iteration " << iteration << "; " << std::endl;
00522 
00523          std::cout << "Evaluation: " << evaluation << ";" << std::endl;
00524          std::cout << "Gradient norm: " << gradientNorm << ";" << std::endl;
00525       }
00526    }
00527 
00528    // Set minimal argument
00529 
00530    minimalArgument = argument;
00531 
00532    // Print minimal argument to screen
00533 
00534    std::cout << std::endl
00535              << "Minimal argument:" << std::endl;
00536    
00537    for(int i = 0; i < numberOfVariables; i++)
00538    {
00539       std::cout << minimalArgument[i] << " ";        
00540    }
00541 
00542    std::cout << std::endl;
00543    
00544    return(minimalArgument);
00545 }
00546 
00547 
00548 // void print(void) method
00549 
00550 /// This method prints to the screen the initial argument and the 
00551 /// stopping criteria concerning the Newton's method object:
00552 ///
00553 /// Initial argument.
00554 ///
00555 /// Stopping criteria:
00556 /// <ul> 
00557 /// <li> Evaluation goal.
00558 /// <li> Gradient norm goal.
00559 /// <li> Maximum time.
00560 /// <li> Maximum number of iterations. 
00561 /// </ul> 
00562 ///  
00563 /// User stuff: 
00564 /// <ul>
00565 /// <li> Show period. 
00566 /// </ul>
00567 
00568 void NewtonMethod::print(void)
00569 {
00570    std::cout << std::endl
00571              << "Newton's Method Object." << std::endl;
00572 
00573    int numberOfVariables = objectiveFunction->getNumberOfVariables();
00574 
00575    // Initial argument
00576 
00577    std::cout << std::endl
00578              << "Initial argument:" << std::endl;
00579 
00580    for(int i = 0; i < numberOfVariables; i++)
00581    {
00582       std::cout << initialArgument[i] << " ";        
00583    }
00584    
00585    std::cout << std::endl;
00586 
00587    // Stopping criteria
00588 
00589    std::cout << std::endl
00590              << "Stopping criteria: " << std::endl
00591              << "Evaluation goal: " << std::endl
00592              << evaluationGoal << std::endl
00593              << "Gradient norm goal" << std::endl 
00594              << gradientNormGoal <<std::endl
00595              << "Maximum time: " << std::endl
00596              << maximumTime << std::endl
00597              << "Maximum number of iterations: " << std::endl
00598              << maximumNumberOfIterations << std::endl;
00599 
00600    // User stuff
00601 
00602    std::cout << std::endl
00603              << "User stuff: " << std::endl
00604              << "Show period: " << std::endl
00605              << showPeriod
00606              << std::endl;
00607 
00608 }
00609 
00610 
00611 // void save(char*) method
00612 
00613 /// This method saves the Newton's method object to a data file. 
00614 ///
00615 /// Initial argument.
00616 ///
00617 /// Stopping criteria:
00618 /// <ul> 
00619 /// <li> Evaluation goal.
00620 /// <li> Gradient norm goal.
00621 /// <li> Maximum time.
00622 /// <li> Maximum number of iterations. 
00623 /// </ul> 
00624 ///  
00625 /// User stuff: 
00626 /// <ul>
00627 /// <li> Show period. 
00628 /// </ul>
00629 ///
00630 /// @param filename: Filename.
00631 ///
00632 /// @see load(char*).
00633 
void NewtonMethod::save(char* filename)
{
   // Writes the object state as labeled tokens. The labels and their
   // order must stay in lockstep with the whitespace-token parsing in
   // load(char*): any change here requires a matching change there.

   // File

   std::fstream file;

   file.open(filename, std::ios::out);

   if(!file.is_open())
   {
      std::cout << std::endl 
                << "Error: NewtonMethod class." << std::endl
                << "void save(char*) method."
                << std::endl
                << "Cannot open Newton method object data file."  << std::endl
                << std::endl;

      exit(1);
   }
   else
   {
      std::cout << std::endl
                << "Saving Newton method object to data file..." << std::endl;
   }

   // Write file header

   file << "% Purple: An Open Source Numerical Optimization C++ Library." 
        << std::endl 
        << "% Newton Method Object." << std::endl; 

   int numberOfVariables = objectiveFunction->getNumberOfVariables();
 
   // Initial argument: one value per variable, space-separated.

   file << "InitialArgument:" << std::endl;

   for(int i = 0; i < numberOfVariables; i++)
   {
      file << initialArgument[i] << " ";        
   }
   
   file << std::endl;

   // Stopping criteria, one "Label:" line followed by the value.

   file << "EvaluationGoal:" << std::endl
        << evaluationGoal << std::endl
        << "GradientNormGoal:" << std::endl
        << gradientNormGoal << std::endl
        << "MaximumTime: " << std::endl
        << maximumTime << std::endl
        << "MaximumNumberOfIterations: " << std::endl
        << maximumNumberOfIterations << std::endl;

   // User stuff

   file << "ShowPeriod: " << std::endl
        << showPeriod << std::endl;

   file.close();
}
00696 
00697 
00698 // void load(char*) method
00699 
00700 /// This method loads a Newton method object from a data file. 
00701 /// Please mind about the file format, which is specified in the User's Guide. 
00702 ///
00703 ///
00704 /// Initial argument.
00705 ///
00706 /// Stopping criteria:
00707 /// <ul> 
00708 /// <li> Evaluation goal.
00709 /// <li> Gradient norm goal.
00710 /// <li> Maximum time.
00711 /// <li> Maximum number of iterations. 
00712 /// </ul> 
00713 ///  
00714 /// User stuff: 
00715 /// <ul>
00716 /// <li> Show period. 
00717 /// </ul>
00718 ///
00719 /// @param filename: Filename.
00720 ///
00721 /// @see save(char*).
00722 
void NewtonMethod::load(char* filename)
{
   // Parses the file produced by save(char*). Parsing is purely
   // token-order based: labels are read with operator>> and their
   // text is never validated, so the value order in the file must
   // match the read order below exactly.

   // File

   std::fstream file;

   file.open(filename, std::ios::in);

   if(!file.is_open())
   {
      std::cout << std::endl
                << "Error: NewtonMethod class." << std::endl
                << "void load(char*) method."
                << std::endl
                << "Cannot open Newton method object data file."  << std::endl;

      exit(1);
   }
   else
   {
      std::cout << std::endl
                << "Loading Newton method object from data file..."
                << std::endl;
   }

   std::string word;

   // Initial argument: skip tokens (including the header comment
   // lines) until the "InitialArgument:" label is found.
   // NOTE(review): if the label is missing this loop spins on a
   // failed stream forever — assumes a well-formed file.

   while(word != "InitialArgument:")
   {
      file >> word;
   }

   int numberOfVariables = objectiveFunction->getNumberOfVariables();

   for(int i = 0; i < numberOfVariables; i++)
   {
      file >> initialArgument[i];        
   }

   // Stopping criteria: each read of "word" consumes a label token,
   // the following read consumes its value.

   // Evaluation goal

   file >> word;

   file >> evaluationGoal;

   // Norm of objective function gradient goal

   file >> word;

   file >> gradientNormGoal;

   // Maximum time

   file >> word;

   file >> maximumTime;

   // Maximum number of iterations

   file >> word;

   file >> maximumNumberOfIterations;

   // User stuff: 

   // Iterations between showing progress

   file >> word;

   file >> showPeriod;

   // Close file

   file.close();
}
00802 
00803 
00804 // void saveOptimizationHistory(char*) method 
00805 
00806 /// This method saves the optimization history to a data file:
00807 ///
00808 /// <ol>
00809 /// <li> Iteration.
00810 /// <li> Objective function evaluation.
00811 /// <li> Objective function gradient norm.
00812 /// </ol>
00813 ///
00814 /// @param filename: Filename.
00815 
00816 void NewtonMethod::saveOptimizationHistory(char* filename)
00817 {
00818    std::fstream file; 
00819 
00820    file.open(filename, std::ios::out);
00821 
00822    // Write file header 
00823 
00824    if(!file.is_open())
00825    {
00826       std::cout << std::endl 
00827                 << "Error: NewtonMethod class. " << std::endl
00828                 << "void saveOptimizationHistory(char*) method."
00829                 << std::endl
00830                 << "Cannot open optimization history data file." << std::endl
00831                 << std::endl;
00832 
00833       exit(1);
00834    }
00835    else
00836    {
00837       std::cout << std::endl 
00838                 << "Saving optimization history to data file..." << std::endl;
00839    }
00840 
00841    // Write file header
00842 
00843    file << "% Purple: An Open Source Numerical Optimization C++ Library." 
00844         << std::endl 
00845         << "% Newton Method Optimization History." << std::endl
00846         << "% 1 - Iteration." << std::endl
00847         << "% 2 - Objective function evaluation." << std::endl
00848         << "% 3 - Objective function gradient norm." << std::endl;
00849 
00850    // Write file data
00851 
00852    int size = evaluationHistory.getSize();
00853 
00854    for (int i = 0; i < size; i++)
00855    {
00856       file << i << " "
00857            << evaluationHistory[i] << " "
00858            << gradientNormHistory[i] << std::endl;
00859    }
00860 
00861    file << std::endl;
00862 
00863    file.close();
00864 }
00865 
00866 }
00867 
00868 
00869 // Purple: An Open Source Numerical Optimization C++ Library.
00870 // Copyright (C) 2005 Roberto Lopez 
00871 //
00872 // This library is free software; you can redistribute it and/or
00873 // modify it under the terms of the GNU Lesser General Public
00874 // License as published by the Free Software Foundation; either
00875 // version 2.1 of the License, or any later version.
00876 //
00877 // This library is distributed in the hope that it will be useful,
00878 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00879 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
00880 // Lesser General Public License for more details.
00881 
00882 // You should have received a copy of the GNU Lesser General Public
00883 // License along with this library; if not, write to the Free Software
00884 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA

Generated on Wed Jun 21 13:10:38 2006 for Purple by  doxygen 1.4.7