NPL
Neurological Programs and Libraries
bfgs.h
Go to the documentation of this file.
1 /******************************************************************************
2  * Copyright 2014 Micah C Chambers (micahc.vt@gmail.com)
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  *
16  * @file bfgs.h Declaration of the BFGSOpt class which implements
17  * a BFGS optimization (energy minimization) algorithm.
18  *
19  *****************************************************************************/
20 
21 #ifndef BFGS_H
22 #define BFGS_H
23 
24 #include <iostream>
25 #include <cmath>
26 #include <Eigen/Dense>
27 
28 #include "opt.h"
29 #include "linesearch.h"
30 
31 namespace npl {
32 
37 class BFGSOpt : virtual public Optimizer
38 {
39 private:
// Running estimate of the inverse Hessian maintained across BFGS
// iterations — presumably the "H_k ≈ B_k^{-1}" of the BFGS update;
// the update rule itself lives in the implementation file. TODO confirm.
43  MatrixXd state_Hinv;
44 
// Armijo approximate (backtracking) line search used to choose the
// step length along each quasi-Newton search direction.
49  Armijo m_lsearch;
50 
51 public:
52 
// Construct from separate value and gradient callables.
// @param dim      dimensionality of the parameter vector being optimized
// @param valfunc  computes the objective value at x
// @param gradfunc computes the gradient at x
// @param callback invoked each iteration; defaults to the no-op callback
53  BFGSOpt(size_t dim, const ValFunc& valfunc,
54  const GradFunc& gradfunc,
55  const CallBackFunc& callback = noopCallback);
56 
// Overload that additionally takes a combined value+gradient callable,
// allowing both to be evaluated in a single pass when that is cheaper.
57  BFGSOpt(size_t dim, const ValFunc& valfunc,
58  const GradFunc& gradfunc,
59  const ValGradFunc& gradAndValFunc,
60  const CallBackFunc& callback = noopCallback);
61 
// Maximum step taken during the line search.
65  double opt_ls_s;
66 
// Power-function base controlling how quickly the line search shrinks
// its step; values closer to 0 decrease the step more rapidly.
71  double opt_ls_beta;
72 
// Threshold for stopping the line search.
76  double opt_ls_sigma;
77 
79 };
80 
83 }
84 
85 #endif // BFGS_H
Implementation of Armijo approximate line search algorithm.
Definition: linesearch.h:36
Definition: accessors.h:29
function< int(const VectorXd &x, double &v, VectorXd &g)> ValGradFunc
Value and Gradient Computation Function.
Definition: opt.h:41
StopReason optimize()
Perform optimization.
int noopCallback(const VectorXd &x, double value, const VectorXd &grad, size_t iter)
Callback that does nothing.
Definition: opt.h:132
function< int(const VectorXd &x, VectorXd &g)> GradFunc
Gradient Only Computation Function.
Definition: opt.h:46
function< int(const VectorXd &x, double &v)> ValFunc
Value Only Computation Function.
Definition: opt.h:51
double opt_ls_sigma
Threshold for stopping linesearch.
Definition: bfgs.h:76
double opt_ls_beta
How quickly to reduce linesearch distance. Power function base, values closer to 0 will decrease step...
Definition: bfgs.h:71
StopReason
Definition: opt.h:141
double opt_ls_s
Maximum step during line search.
Definition: bfgs.h:65
BFGSOpt(size_t dim, const ValFunc &valfunc, const GradFunc &gradfunc, const CallBackFunc &callback=noopCallback)
function< int(const VectorXd &x, double v, const VectorXd &g, size_t iter)> CallBackFunc
Callback function.
Definition: opt.h:56