Refactoring of the LineSearchMinimizer.

1. New LineSearchDirection interface, factory and instances.
2. Cleanup of LineSearchMinimizer to use the State and Direction objects.
3. LBFGS -> LowRankInverseHessian.
4. Refactoring of the RunCallbacks function, which is now shared
   between LineSearchMinimizer and TrustRegionMinimizer.

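A rough sketch of how the new pieces fit together (all names are from
this change; the wiring is simplified relative to the actual
LineSearchMinimizer code):

  LineSearchDirection::Options direction_options;
  direction_options.num_parameters = num_effective_parameters;
  direction_options.type = options.line_search_direction_type;
  scoped_ptr<LineSearchDirection> direction(
      LineSearchDirection::Create(direction_options));

  // Each iteration asks the direction object for a new search
  // direction, given the previous and current iterate states.
  direction->NextDirection(previous_state, current_state,
                           &current_state.search_direction);
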
Change-Id: I19354afc6f5d6567b28918710c2012dc30ef8f32
diff --git a/internal/ceres/CMakeLists.txt b/internal/ceres/CMakeLists.txt
index 6b664e4..786a3d7 100644
--- a/internal/ceres/CMakeLists.txt
+++ b/internal/ceres/CMakeLists.txt
@@ -57,15 +57,17 @@
     gradient_checking_cost_function.cc
     implicit_schur_complement.cc
     iterative_schur_complement_solver.cc
-    lbfgs.cc
     levenberg_marquardt_strategy.cc
     line_search.cc
+    line_search_direction.cc
     line_search_minimizer.cc
     linear_least_squares_problems.cc
     linear_operator.cc
     linear_solver.cc
     local_parameterization.cc
     loss_function.cc
+    low_rank_inverse_hessian.cc
+    minimizer.cc
     normal_prior.cc
     parameter_block_ordering.cc
     partitioned_matrix_view.cc
diff --git a/internal/ceres/line_search_direction.cc b/internal/ceres/line_search_direction.cc
new file mode 100644
index 0000000..f441607
--- /dev/null
+++ b/internal/ceres/line_search_direction.cc
@@ -0,0 +1,144 @@
+// Ceres Solver - A fast non-linear least squares minimizer
+// Copyright 2012 Google Inc. All rights reserved.
+// http://code.google.com/p/ceres-solver/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+//   this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+//   this list of conditions and the following disclaimer in the documentation
+//   and/or other materials provided with the distribution.
+// * Neither the name of Google Inc. nor the names of its contributors may be
+//   used to endorse or promote products derived from this software without
+//   specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: sameeragarwal@google.com (Sameer Agarwal)
+
+#include "ceres/line_search_direction.h"
+#include "ceres/line_search_minimizer.h"
+#include "ceres/low_rank_inverse_hessian.h"
+#include "ceres/internal/eigen.h"
+#include "glog/logging.h"
+
+namespace ceres {
+namespace internal {
+
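+// Trivial line search direction: always search along the negative
+// gradient.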
+class SteepestDescent : public LineSearchDirection {
+ public:
+  virtual ~SteepestDescent() {}
+  bool NextDirection(const LineSearchMinimizer::State& previous,
+                     const LineSearchMinimizer::State& current,
+                     Vector* search_direction) {
+    *search_direction = -current.gradient;
+    return true;
+  }
+};
+
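+// Nonlinear conjugate gradient directions. If the computed direction
+// fails to be a sufficiently strong descent direction, the iteration
+// is restarted with a steepest descent step.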
+class NonlinearConjugateGradient : public LineSearchDirection {
+ public:
+  NonlinearConjugateGradient(const NonlinearConjugateGradientType type,
+                             const double function_tolerance)
+      : type_(type),
+        function_tolerance_(function_tolerance) {
+  }
+
+  bool NextDirection(const LineSearchMinimizer::State& previous,
+                     const LineSearchMinimizer::State& current,
+                     Vector* search_direction) {
+    double beta = 0.0;
+    Vector gradient_change;
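+    // beta controls how much of the previous search direction is mixed
+    // into the new one; each case below is a standard update rule.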
+    switch (type_) {
+      case FLETCHER_REEVES:
+        beta = current.gradient_squared_norm / previous.gradient_squared_norm;
+        break;
+      case POLAK_RIBIRERE:
+        gradient_change = current.gradient - previous.gradient;
+        beta = (current.gradient.dot(gradient_change) /
+                previous.gradient_squared_norm);
+        break;
+      case HESTENES_STIEFEL:
+        gradient_change = current.gradient - previous.gradient;
+        beta = (current.gradient.dot(gradient_change) /
+                previous.search_direction.dot(gradient_change));
+        break;
+      default:
+        LOG(FATAL) << "Unknown nonlinear conjugate gradient type: " << type_;
+    }
+
+    *search_direction = -current.gradient + beta * previous.search_direction;
+    const double directional_derivative =
+        current.gradient.dot(*search_direction);
+    if (directional_derivative > -function_tolerance_) {
+      LOG(WARNING) << "Restarting non-linear conjugate gradients: "
+                   << directional_derivative;
+      *search_direction = -current.gradient;
+    }
+
+    return true;
+  }
+
+ private:
+  const NonlinearConjugateGradientType type_;
+  const double function_tolerance_;
+};
+
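+// Quasi-Newton directions computed using a low rank (limited memory)
+// approximation to the inverse Hessian.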
+class LBFGS : public LineSearchDirection {
+ public:
+  LBFGS(const int num_parameters, const int max_lbfgs_rank)
+      : low_rank_inverse_hessian_(num_parameters, max_lbfgs_rank) {}
+
+  virtual ~LBFGS() {}
+
+  bool NextDirection(const LineSearchMinimizer::State& previous,
+                     const LineSearchMinimizer::State& current,
+                     Vector* search_direction) {
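+    // The previous step and the corresponding change in the gradient
+    // form the secant pair used to update the inverse Hessian
+    // approximation.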
+    low_rank_inverse_hessian_.Update(
+        previous.search_direction * previous.step_size,
+        current.gradient - previous.gradient);
+    search_direction->setZero();
+    low_rank_inverse_hessian_.RightMultiply(current.gradient.data(),
+                                            search_direction->data());
+    *search_direction *= -1.0;
+    return true;
+  }
+
+ private:
+  LowRankInverseHessian low_rank_inverse_hessian_;
+};
+
+LineSearchDirection*
+LineSearchDirection::Create(const LineSearchDirection::Options& options) {
+  if (options.type == STEEPEST_DESCENT) {
+    return new SteepestDescent;
+  }
+
+  if (options.type == NONLINEAR_CONJUGATE_GRADIENT) {
+    return new NonlinearConjugateGradient(
+        options.nonlinear_conjugate_gradient_type,
+        options.function_tolerance);
+  }
+
+  if (options.type == ceres::LBFGS) {
+    return new ceres::internal::LBFGS(options.num_parameters,
+                                      options.max_lbfgs_rank);
+  }
+
+  LOG(ERROR) << "Unknown line search direction type: " << options.type;
+  return NULL;
+}
+
+}  // namespace internal
+}  // namespace ceres
diff --git a/internal/ceres/line_search_direction.h b/internal/ceres/line_search_direction.h
new file mode 100644
index 0000000..d92c362
--- /dev/null
+++ b/internal/ceres/line_search_direction.h
@@ -0,0 +1,71 @@
+// Ceres Solver - A fast non-linear least squares minimizer
+// Copyright 2012 Google Inc. All rights reserved.
+// http://code.google.com/p/ceres-solver/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+//   this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+//   this list of conditions and the following disclaimer in the documentation
+//   and/or other materials provided with the distribution.
+// * Neither the name of Google Inc. nor the names of its contributors may be
+//   used to endorse or promote products derived from this software without
+//   specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: sameeragarwal@google.com (Sameer Agarwal)
+
+#ifndef CERES_INTERNAL_LINE_SEARCH_DIRECTION_H_
+#define CERES_INTERNAL_LINE_SEARCH_DIRECTION_H_
+
+#include "ceres/internal/eigen.h"
+#include "ceres/line_search_minimizer.h"
+#include "ceres/types.h"
+
+namespace ceres {
+namespace internal {
+
+class LineSearchDirection {
+ public:
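+  // Options used to construct a LineSearchDirection object.
+  // function_tolerance is used by the nonlinear conjugate gradient
+  // direction to decide when to restart, and max_lbfgs_rank bounds the
+  // rank of the LBFGS inverse Hessian approximation.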
+  struct Options {
+    Options()
+        : num_parameters(0),
+          type(LBFGS),
+          nonlinear_conjugate_gradient_type(FLETCHER_REEVES),
+          function_tolerance(1e-12),
+          max_lbfgs_rank(20) {
+    }
+
+    int num_parameters;
+    LineSearchDirectionType type;
+    NonlinearConjugateGradientType nonlinear_conjugate_gradient_type;
+    double function_tolerance;
+    int max_lbfgs_rank;
+  };
+
+  static LineSearchDirection* Create(const Options& options);
+
+  virtual ~LineSearchDirection() {}
+  virtual bool NextDirection(const LineSearchMinimizer::State& previous,
+                             const LineSearchMinimizer::State& current,
+                             Vector* search_direction) = 0;
+};
+
+}  // namespace internal
+}  // namespace ceres
+
+#endif  // CERES_INTERNAL_LINE_SEARCH_DIRECTION_H_
diff --git a/internal/ceres/line_search_minimizer.cc b/internal/ceres/line_search_minimizer.cc
index a52d4de..3392194 100644
--- a/internal/ceres/line_search_minimizer.cc
+++ b/internal/ceres/line_search_minimizer.cc
@@ -43,18 +43,17 @@
 #include <algorithm>
 #include <cstdlib>
 #include <cmath>
-#include <cstring>
-#include <limits>
 #include <string>
 #include <vector>
 
 #include "Eigen/Dense"
 #include "ceres/array_utils.h"
-#include "ceres/lbfgs.h"
 #include "ceres/evaluator.h"
 #include "ceres/internal/eigen.h"
+#include "ceres/internal/port.h"
 #include "ceres/internal/scoped_ptr.h"
 #include "ceres/line_search.h"
+#include "ceres/line_search_direction.h"
 #include "ceres/stringprintf.h"
 #include "ceres/types.h"
 #include "ceres/wall_time.h"
@@ -65,35 +64,31 @@
 namespace {
 // Small constant for various floating point issues.
 const double kEpsilon = 1e-12;
-}  // namespace
-
-// Execute the list of IterationCallbacks sequentially. If any one of
-// the callbacks does not return SOLVER_CONTINUE, then stop and return
-// its status.
-CallbackReturnType LineSearchMinimizer::RunCallbacks(
-    const IterationSummary& iteration_summary) {
-  for (int i = 0; i < options_.callbacks.size(); ++i) {
-    const CallbackReturnType status =
-        (*options_.callbacks[i])(iteration_summary);
-    if (status != SOLVER_CONTINUE) {
-      return status;
-    }
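+
+// Evaluate the cost and gradient of the objective at x, caching the
+// gradient norms in state. Returns false if the evaluation fails.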
+bool Evaluate(Evaluator* evaluator,
+              const Vector& x,
+              LineSearchMinimizer::State* state) {
+  const bool status = evaluator->Evaluate(x.data(),
+                                          &(state->cost),
+                                          NULL,
+                                          state->gradient.data(),
+                                          NULL);
+  if (status) {
+    state->gradient_squared_norm = state->gradient.squaredNorm();
+    state->gradient_max_norm = state->gradient.lpNorm<Eigen::Infinity>();
   }
-  return SOLVER_CONTINUE;
+
+  return status;
 }
 
-void LineSearchMinimizer::Init(const Minimizer::Options& options) {
-  options_ = options;
-}
+}  // namespace
 
 void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
                                    double* parameters,
                                    Solver::Summary* summary) {
   double start_time = WallTimeInSeconds();
   double iteration_start_time =  start_time;
-  Init(options);
 
-  Evaluator* evaluator = CHECK_NOTNULL(options_.evaluator);
+  Evaluator* evaluator = CHECK_NOTNULL(options.evaluator);
   const int num_parameters = evaluator->NumParameters();
   const int num_effective_parameters = evaluator->NumEffectiveParameters();
 
@@ -103,21 +98,12 @@
 
   VectorRef x(parameters, num_parameters);
 
-  Vector gradient(num_effective_parameters);
-  double gradient_squared_norm;
-  Vector previous_gradient(num_effective_parameters);
-  Vector gradient_change(num_effective_parameters);
-  double previous_gradient_squared_norm = 0.0;
-
-  Vector search_direction(num_effective_parameters);
-  Vector previous_search_direction(num_effective_parameters);
+  State current_state(num_parameters, num_effective_parameters);
+  State previous_state(num_parameters, num_effective_parameters);
 
   Vector delta(num_effective_parameters);
   Vector x_plus_delta(num_parameters);
 
-  double directional_derivative = 0.0;
-  double previous_directional_derivative = 0.0;
-
   IterationSummary iteration_summary;
   iteration_summary.iteration = 0;
   iteration_summary.step_is_valid = false;
@@ -129,31 +115,28 @@
   iteration_summary.step_solver_time_in_seconds = 0;
 
   // Do initial cost and Jacobian evaluation.
-  double cost = 0.0;
-  double previous_cost = 0.0;
-  if (!evaluator->Evaluate(x.data(), &cost, NULL, gradient.data(), NULL)) {
+  if (!Evaluate(evaluator, x, &current_state)) {
     LOG(WARNING) << "Terminating: Cost and gradient evaluation failed.";
     summary->termination_type = NUMERICAL_FAILURE;
     return;
   }
 
-  gradient_squared_norm = gradient.squaredNorm();
-  iteration_summary.cost = cost + summary->fixed_cost;
-  iteration_summary.gradient_max_norm = gradient.lpNorm<Eigen::Infinity>();
+  iteration_summary.cost = current_state.cost + summary->fixed_cost;
+  iteration_summary.gradient_max_norm = current_state.gradient_max_norm;
 
   // The initial gradient max_norm is bounded from below so that we do
   // not divide by zero.
-  const double gradient_max_norm_0 =
+  const double initial_gradient_max_norm =
       max(iteration_summary.gradient_max_norm, kEpsilon);
   const double absolute_gradient_tolerance =
-      options_.gradient_tolerance * gradient_max_norm_0;
+      options.gradient_tolerance * initial_gradient_max_norm;
 
   if (iteration_summary.gradient_max_norm <= absolute_gradient_tolerance) {
     summary->termination_type = GRADIENT_TOLERANCE;
     VLOG(1) << "Terminating: Gradient tolerance reached."
             << "Relative gradient max norm: "
-            << iteration_summary.gradient_max_norm / gradient_max_norm_0
-            << " <= " << options_.gradient_tolerance;
+            << iteration_summary.gradient_max_norm / initial_gradient_max_norm
+            << " <= " << options.gradient_tolerance;
     return;
   }
 
@@ -164,23 +147,14 @@
       + summary->preprocessor_time_in_seconds;
   summary->iterations.push_back(iteration_summary);
 
-  // Call the various callbacks.  TODO(sameeragarwal): Here and in
-  // trust_region_minimizer make this into a function that can be
-  // shared.
-  switch (RunCallbacks(iteration_summary)) {
-    case SOLVER_TERMINATE_SUCCESSFULLY:
-      summary->termination_type = USER_SUCCESS;
-      VLOG(1) << "Terminating: User callback returned USER_SUCCESS.";
-      return;
-    case SOLVER_ABORT:
-      summary->termination_type = USER_ABORT;
-      VLOG(1) << "Terminating: User callback returned  USER_ABORT.";
-      return;
-    case SOLVER_CONTINUE:
-      break;
-    default:
-      LOG(FATAL) << "Unknown type of user callback status";
-  }
+  LineSearchDirection::Options line_search_direction_options;
+  line_search_direction_options.num_parameters = num_effective_parameters;
+  line_search_direction_options.type = options.line_search_direction_type;
+  line_search_direction_options.nonlinear_conjugate_gradient_type =
+      options.nonlinear_conjugate_gradient_type;
+  line_search_direction_options.function_tolerance =
+      options.function_tolerance;
+  line_search_direction_options.max_lbfgs_rank = options.max_lbfgs_rank;
+  scoped_ptr<LineSearchDirection> line_search_direction(
+      LineSearchDirection::Create(line_search_direction_options));
 
   LineSearchFunction line_search_function(evaluator);
   LineSearch::Options line_search_options;
@@ -191,14 +165,13 @@
   ArmijoLineSearch line_search;
   LineSearch::Summary line_search_summary;
 
-  scoped_ptr<LBFGS> lbfgs;
-  if (options_.line_search_direction_type == ceres::LBFGS) {
-    lbfgs.reset(new LBFGS(num_effective_parameters, options_.max_lbfgs_rank));
-  }
-
   while (true) {
+    if (!RunCallbacks(options.callbacks, iteration_summary, summary)) {
+      return;
+    }
+
     iteration_start_time = WallTimeInSeconds();
-    if (iteration_summary.iteration >= options_.max_num_iterations) {
+    if (iteration_summary.iteration >= options.max_num_iterations) {
       summary->termination_type = NO_CONVERGENCE;
       VLOG(1) << "Terminating: Maximum number of iterations reached.";
       break;
@@ -206,175 +179,98 @@
 
     const double total_solver_time = iteration_start_time - start_time +
         summary->preprocessor_time_in_seconds;
-    if (total_solver_time >= options_.max_solver_time_in_seconds) {
+    if (total_solver_time >= options.max_solver_time_in_seconds) {
       summary->termination_type = NO_CONVERGENCE;
       VLOG(1) << "Terminating: Maximum solver time reached.";
       break;
     }
 
-    previous_search_direction = search_direction;
-
     iteration_summary = IterationSummary();
     iteration_summary.iteration = summary->iterations.back().iteration + 1;
-    iteration_summary.step_is_valid = false;
-    iteration_summary.step_is_successful = false;
 
+    bool line_search_status = true;
     if (iteration_summary.iteration == 1) {
-      search_direction = -gradient;
-      directional_derivative = -gradient_squared_norm;
+      current_state.search_direction = -current_state.gradient;
     } else {
-      if (lbfgs.get() != NULL) {
-        lbfgs->Update(delta, gradient_change);
-      }
-
-      // TODO(sameeragarwal): This should probably be refactored into
-      // a set of functions. But we will do that once things settle
-      // down in this solver.
-      switch (options_.line_search_direction_type) {
-        case STEEPEST_DESCENT:
-          search_direction = -gradient;
-          directional_derivative = -gradient_squared_norm;
-          break;
-
-        case NONLINEAR_CONJUGATE_GRADIENT:
-          {
-            double beta = 0.0;
-
-            switch (options_.nonlinear_conjugate_gradient_type) {
-              case FLETCHER_REEVES:
-                beta = gradient.squaredNorm() /
-                    previous_gradient_squared_norm;
-                break;
-
-              case POLAK_RIBIRERE:
-                gradient_change = gradient - previous_gradient;
-                beta = gradient.dot(gradient_change) /
-                    previous_gradient_squared_norm;
-                break;
-
-              case HESTENES_STIEFEL:
-                gradient_change = gradient - previous_gradient;
-                beta = gradient.dot(gradient_change) /
-                    previous_search_direction.dot(gradient_change);
-                break;
-
-              default:
-                LOG(FATAL) << "Unknown nonlinear conjugate gradient type: "
-                           << options_.nonlinear_conjugate_gradient_type;
-            }
-
-            search_direction = -gradient + beta * previous_search_direction;
-          }
-
-          directional_derivative =  gradient.dot(search_direction);
-          if (directional_derivative > -options.function_tolerance) {
-            LOG(WARNING) << "Restarting non-linear conjugate gradients: "
-                         << directional_derivative;
-            search_direction = -gradient;
-            directional_derivative = -gradient_squared_norm;
-          }
-          break;
-
-        case ceres::LBFGS:
-          search_direction.setZero();
-          lbfgs->RightMultiply(gradient.data(), search_direction.data());
-          search_direction *= -1.0;
-          directional_derivative =  gradient.dot(search_direction);
-          break;
-
-        default:
-          LOG(FATAL) << "Unknown line search direction type: "
-                     << options_.line_search_direction_type;
-      }
+      line_search_status = line_search_direction->NextDirection(
+          previous_state,
+          current_state,
+          &current_state.search_direction);
     }
 
+    if (!line_search_status) {
+      LOG(WARNING) << "Line search direction computation failed. "
+          "Resorting to steepest descent.";
+      current_state.search_direction = -current_state.gradient;
+    }
+
+    line_search_function.Init(x, current_state.search_direction);
+    current_state.directional_derivative =
+        current_state.gradient.dot(current_state.search_direction);
+
     // TODO(sameeragarwal): Refactor this into its own object and add
     // explanations for the various choices.
     const double initial_step_size = (iteration_summary.iteration == 1)
-        ? min(1.0, 1.0 / gradient.lpNorm<Eigen::Infinity>())
-        : min(1.0, 2.0 * (cost - previous_cost) / directional_derivative);
+        ? min(1.0, 1.0 / current_state.gradient_max_norm)
+        : min(1.0, 2.0 * (current_state.cost - previous_state.cost) /
+              current_state.directional_derivative);
 
-    previous_cost = cost;
-    previous_gradient = gradient;
-    previous_gradient_squared_norm = gradient_squared_norm;
-    previous_directional_derivative = directional_derivative;
-
-    line_search_function.Init(x, search_direction);
     line_search.Search(line_search_options,
                        initial_step_size,
-                       cost,
-                       directional_derivative,
+                       current_state.cost,
+                       current_state.directional_derivative,
                        &line_search_summary);
 
-    delta = line_search_summary.optimal_step_size * search_direction;
+    current_state.step_size = line_search_summary.optimal_step_size;
+    delta = current_state.step_size * current_state.search_direction;
+
+    previous_state = current_state;
+
     // TODO(sameeragarwal): Collect stats.
     if (!evaluator->Plus(x.data(), delta.data(), x_plus_delta.data()) ||
-        !evaluator->Evaluate(x_plus_delta.data(),
-                             &cost,
-                             NULL,
-                             gradient.data(),
-                             NULL)) {
+        !Evaluate(evaluator, x_plus_delta, &current_state)) {
       LOG(WARNING) << "Evaluation failed.";
-      cost = previous_cost;
-      gradient = previous_gradient;
     } else {
       x = x_plus_delta;
-      gradient_squared_norm = gradient.squaredNorm();
     }
 
-
-    iteration_summary.cost = cost + summary->fixed_cost;
-    iteration_summary.cost_change = previous_cost - cost;
-    iteration_summary.step_norm = delta.norm();
-    iteration_summary.gradient_max_norm = gradient.lpNorm<Eigen::Infinity>();
-    iteration_summary.step_is_valid = true;
-    iteration_summary.step_is_successful = true;
-    iteration_summary.step_norm = delta.norm();
-    iteration_summary.step_size =  line_search_summary.optimal_step_size;
-    iteration_summary.line_search_function_evaluations =
-        line_search_summary.num_evaluations;
-
+    iteration_summary.gradient_max_norm = current_state.gradient_max_norm;
     if (iteration_summary.gradient_max_norm <= absolute_gradient_tolerance) {
       summary->termination_type = GRADIENT_TOLERANCE;
       VLOG(1) << "Terminating: Gradient tolerance reached."
               << "Relative gradient max norm: "
-              << iteration_summary.gradient_max_norm / gradient_max_norm_0
-              << " <= " << options_.gradient_tolerance;
+              << iteration_summary.gradient_max_norm / initial_gradient_max_norm
+              << " <= " << options.gradient_tolerance;
       break;
     }
 
+    iteration_summary.cost_change = previous_state.cost - current_state.cost;
     const double absolute_function_tolerance =
-        options_.function_tolerance * previous_cost;
+        options.function_tolerance * previous_state.cost;
     if (fabs(iteration_summary.cost_change) < absolute_function_tolerance) {
       VLOG(1) << "Terminating. Function tolerance reached. "
               << "|cost_change|/cost: "
-              << fabs(iteration_summary.cost_change) / previous_cost
-              << " <= " << options_.function_tolerance;
+              << fabs(iteration_summary.cost_change) / previous_state.cost
+              << " <= " << options.function_tolerance;
       summary->termination_type = FUNCTION_TOLERANCE;
       return;
     }
 
+    iteration_summary.cost = current_state.cost + summary->fixed_cost;
+    iteration_summary.step_norm = delta.norm();
+    iteration_summary.step_is_valid = true;
+    iteration_summary.step_is_successful = true;
+    iteration_summary.step_size = current_state.step_size;
+    iteration_summary.line_search_function_evaluations =
+        line_search_summary.num_evaluations;
     iteration_summary.iteration_time_in_seconds =
         WallTimeInSeconds() - iteration_start_time;
     iteration_summary.cumulative_time_in_seconds =
         WallTimeInSeconds() - start_time
         + summary->preprocessor_time_in_seconds;
-    summary->iterations.push_back(iteration_summary);
 
-    switch (RunCallbacks(iteration_summary)) {
-      case SOLVER_TERMINATE_SUCCESSFULLY:
-        summary->termination_type = USER_SUCCESS;
-        VLOG(1) << "Terminating: User callback returned USER_SUCCESS.";
-        return;
-      case SOLVER_ABORT:
-        summary->termination_type = USER_ABORT;
-        VLOG(1) << "Terminating: User callback returned  USER_ABORT.";
-        return;
-      case SOLVER_CONTINUE:
-        break;
-      default:
-        LOG(FATAL) << "Unknown type of user callback status";
-    }
+    summary->iterations.push_back(iteration_summary);
   }
 }
 
diff --git a/internal/ceres/line_search_minimizer.h b/internal/ceres/line_search_minimizer.h
index a9556fd..f82f139 100644
--- a/internal/ceres/line_search_minimizer.h
+++ b/internal/ceres/line_search_minimizer.h
@@ -34,6 +34,8 @@
 #include "ceres/minimizer.h"
 #include "ceres/solver.h"
 #include "ceres/types.h"
+#include "ceres/internal/eigen.h"
+#include "glog/logging.h"
 
 namespace ceres {
 namespace internal {
@@ -43,17 +45,33 @@
 // For example usage, see SolverImpl::Minimize.
 class LineSearchMinimizer : public Minimizer {
  public:
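+  // Per-iteration state of the minimizer: the cost, gradient and
+  // search direction at an iterate, as consumed by the
+  // LineSearchDirection implementations.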
+  struct State {
+    State(int num_parameters,
+          int num_effective_parameters)
+        : cost(0.0),
+          gradient(num_effective_parameters),
+          gradient_squared_norm(0.0),
+          search_direction(num_effective_parameters),
+          directional_derivative(0.0),
+          step_size(0.0) {
+    }
+
+    double cost;
+    Vector gradient;
+    double gradient_squared_norm;
+    double gradient_max_norm;
+    Vector search_direction;
+    double directional_derivative;
+    double step_size;
+  };
+
   ~LineSearchMinimizer() {}
   virtual void Minimize(const Minimizer::Options& options,
                         double* parameters,
                         Solver::Summary* summary);
-
- private:
-  void Init(const Minimizer::Options& options);
-  CallbackReturnType RunCallbacks(const IterationSummary& iteration_summary);
-  Minimizer::Options options_;
 };
 
 }  // namespace internal
 }  // namespace ceres
+
 #endif  // CERES_INTERNAL_LINE_SEARCH_MINIMIZER_H_
diff --git a/internal/ceres/lbfgs.cc b/internal/ceres/low_rank_inverse_hessian.cc
similarity index 90%
rename from internal/ceres/lbfgs.cc
rename to internal/ceres/low_rank_inverse_hessian.cc
index 48067c8..3fe113f 100644
--- a/internal/ceres/lbfgs.cc
+++ b/internal/ceres/low_rank_inverse_hessian.cc
@@ -28,14 +28,15 @@
 //
 // Author: sameeragarwal@google.com (Sameer Agarwal)
 
-#include "glog/logging.h"
-#include "ceres/lbfgs.h"
 #include "ceres/internal/eigen.h"
+#include "ceres/low_rank_inverse_hessian.h"
+#include "glog/logging.h"
 
 namespace ceres {
 namespace internal {
 
-LBFGS::LBFGS(int num_parameters, int max_num_corrections)
+LowRankInverseHessian::LowRankInverseHessian(int num_parameters,
+                                             int max_num_corrections)
     : num_parameters_(num_parameters),
       max_num_corrections_(max_num_corrections),
       num_corrections_(0),
@@ -45,7 +46,8 @@
       delta_x_dot_delta_gradient_(max_num_corrections) {
 }
 
-bool LBFGS::Update(const Vector& delta_x, const Vector& delta_gradient) {
+bool LowRankInverseHessian::Update(const Vector& delta_x,
+                                   const Vector& delta_gradient) {
   const double delta_x_dot_delta_gradient = delta_x.dot(delta_gradient);
   if (delta_x_dot_delta_gradient <= 1e-10) {
     VLOG(2) << "Skipping LBFGS Update. " << delta_x_dot_delta_gradient;
@@ -79,7 +81,8 @@
   return true;
 }
 
-void LBFGS::RightMultiply(const double* x_ptr, double* y_ptr) const {
+void LowRankInverseHessian::RightMultiply(const double* x_ptr,
+                                          double* y_ptr) const {
   ConstVectorRef gradient(x_ptr, num_parameters_);
   VectorRef search_direction(y_ptr, num_parameters_);
 
diff --git a/internal/ceres/lbfgs.h b/internal/ceres/low_rank_inverse_hessian.h
similarity index 71%
rename from internal/ceres/lbfgs.h
rename to internal/ceres/low_rank_inverse_hessian.h
index f6281d8..5d59c14 100644
--- a/internal/ceres/lbfgs.h
+++ b/internal/ceres/low_rank_inverse_hessian.h
@@ -29,14 +29,10 @@
 // Author: sameeragarwal@google.com (Sameer Agarwal)
 //
 // Limited memory positive definite approximation to the inverse
-// Hessian, using the work of
-//
-// Nocedal, J. (1980). "Updating Quasi-Newton Matrices with Limited
-// Storage". Mathematics of Computation 35 (151): 773–782.
-//
-// Byrd, R. H.; Nocedal, J.; Schnabel, R. B. (1994).
-// "Representations of Quasi-Newton Matrices and their use in
-// Limited Memory Methods". Mathematical Programming 63 (4):
+// Hessian, using the LBFGS algorithm.
+
+#ifndef CERES_INTERNAL_LOW_RANK_INVERSE_HESSIAN_H_
+#define CERES_INTERNAL_LOW_RANK_INVERSE_HESSIAN_H_
 
 #include "ceres/internal/eigen.h"
 #include "ceres/linear_operator.h"
@@ -44,22 +40,39 @@
 namespace ceres {
 namespace internal {
 
-// Right multiplying with an LBFGS operator is equivalent to
-// multiplying by the inverse Hessian.
-class LBFGS : public LinearOperator {
+// LowRankInverseHessian is a positive definite approximation to the
+// Hessian using the limited memory variant of the
+// Broyden-Fletcher-Goldfarb-Shanno (BFGS) secant formula for
+// approximating the Hessian.
+//
+// Other update rules like the Davidon-Fletcher-Powell (DFP) are
+// possible, but the BFGS rule is considered the best performing one.
+//
+// The limited memory variant was developed by Nocedal and further
+// enhanced with a scaling rule by Byrd, Nocedal and Schnabel.
+//
+// Nocedal, J. (1980). "Updating Quasi-Newton Matrices with Limited
+// Storage". Mathematics of Computation 35 (151): 773–782.
+//
+// Byrd, R. H.; Nocedal, J.; Schnabel, R. B. (1994).
+// "Representations of Quasi-Newton Matrices and their use in
+// Limited Memory Methods". Mathematical Programming 63 (4): 129–156.
+class LowRankInverseHessian : public LinearOperator {
  public:
   // num_parameters is the row/column size of the Hessian.
   // max_num_corrections is the rank of the Hessian approximation.
   // The approximation uses:
   // 2 * max_num_corrections * num_parameters + max_num_corrections
   // doubles.
-  LBFGS(int num_parameters, int max_num_corrections);
-  virtual ~LBFGS() {}
+  LowRankInverseHessian(int num_parameters, int max_num_corrections);
+  virtual ~LowRankInverseHessian() {}
 
-  // Update the low rank approximation, i.e. store delta_x and
-  // delta_gradient, and get rid of the oldest delta_x and
-  // delta_gradient vectors if the number of corrections is already
-  // equal to max_num_corrections.
+  // Update the low rank approximation. delta_x is the change in the
+  // domain of Hessian, and delta_gradient is the change in the
+  // gradient.  The update copies the delta_x and delta_gradient
+  // vectors, and gets rid of the oldest delta_x and delta_gradient
+  // vectors if the number of corrections is already equal to
+  // max_num_corrections.
   bool Update(const Vector& delta_x, const Vector& delta_gradient);
 
   // LinearOperator interface
@@ -82,3 +95,5 @@
 
 }  // namespace internal
 }  // namespace ceres
+
+#endif  // CERES_INTERNAL_LOW_RANK_INVERSE_HESSIAN_H_
diff --git a/internal/ceres/minimizer.cc b/internal/ceres/minimizer.cc
new file mode 100644
index 0000000..912ac99
--- /dev/null
+++ b/internal/ceres/minimizer.cc
@@ -0,0 +1,67 @@
+// Ceres Solver - A fast non-linear least squares minimizer
+// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
+// http://code.google.com/p/ceres-solver/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+//   this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+//   this list of conditions and the following disclaimer in the documentation
+//   and/or other materials provided with the distribution.
+// * Neither the name of Google Inc. nor the names of its contributors may be
+//   used to endorse or promote products derived from this software without
+//   specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: sameeragarwal@google.com (Sameer Agarwal)
+
+#include "ceres/minimizer.h"
+#include "ceres/types.h"
+#include "glog/logging.h"
+
+namespace ceres {
+namespace internal {
+
+Minimizer::~Minimizer() {}
+
+bool Minimizer::RunCallbacks(const vector<IterationCallback*>& callbacks,
+                             const IterationSummary& iteration_summary,
+                             Solver::Summary* summary) {
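+  // Call the callbacks in order, stopping at the first one that does
+  // not return SOLVER_CONTINUE.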
+  CallbackReturnType status = SOLVER_CONTINUE;
+  int i = 0;
+  while (status == SOLVER_CONTINUE && i < callbacks.size()) {
+    status = (*callbacks[i])(iteration_summary);
+    ++i;
+  }
+  switch (status) {
+    case SOLVER_CONTINUE:
+      return true;
+    case SOLVER_TERMINATE_SUCCESSFULLY:
+      summary->termination_type = USER_SUCCESS;
+      VLOG(1) << "Terminating: User callback returned USER_SUCCESS.";
+      return false;
+    case SOLVER_ABORT:
+      summary->termination_type = USER_ABORT;
+      VLOG(1) << "Terminating: User callback returned  USER_ABORT.";
+      return false;
+    default:
+      LOG(FATAL) << "Unknown type of user callback status";
+  }
+  return false;
+}
+
+}  // namespace internal
+}  // namespace ceres
diff --git a/internal/ceres/minimizer.h b/internal/ceres/minimizer.h
index 2be3a3b..708974d 100644
--- a/internal/ceres/minimizer.h
+++ b/internal/ceres/minimizer.h
@@ -143,8 +143,11 @@
     Minimizer* inner_iteration_minimizer;
   };
 
-  virtual ~Minimizer() {}
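+  // Invoke the user supplied IterationCallbacks, in order, until one
+  // of them returns something other than SOLVER_CONTINUE. Returns true
+  // if the minimization should continue, false otherwise, setting
+  // summary->termination_type in the latter case.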
+  static bool RunCallbacks(const vector<IterationCallback*>& callbacks,
+                           const IterationSummary& iteration_summary,
+                           Solver::Summary* summary);
 
+  virtual ~Minimizer();
+
   // Note: The minimizer is expected to update the state of the
   // parameters array every iteration. This is required for the
   // StateUpdatingCallback to work.
diff --git a/internal/ceres/trust_region_minimizer.cc b/internal/ceres/trust_region_minimizer.cc
index db2641d..5ea9374 100644
--- a/internal/ceres/trust_region_minimizer.cc
+++ b/internal/ceres/trust_region_minimizer.cc
@@ -58,21 +58,6 @@
 const double kEpsilon = 1e-12;
 }  // namespace
 
-// Execute the list of IterationCallbacks sequentially. If any one of
-// the callbacks does not return SOLVER_CONTINUE, then stop and return
-// its status.
-CallbackReturnType TrustRegionMinimizer::RunCallbacks(
-    const IterationSummary& iteration_summary) {
-  for (int i = 0; i < options_.callbacks.size(); ++i) {
-    const CallbackReturnType status =
-        (*options_.callbacks[i])(iteration_summary);
-    if (status != SOLVER_CONTINUE) {
-      return status;
-    }
-  }
-  return SOLVER_CONTINUE;
-}
-
 // Compute a scaling vector that is used to improve the conditioning
 // of the Jacobian.
 void TrustRegionMinimizer::EstimateScale(const SparseMatrix& jacobian,
@@ -182,16 +167,16 @@
 
   // The initial gradient max_norm is bounded from below so that we do
   // not divide by zero.
-  const double gradient_max_norm_0 =
+  const double initial_gradient_max_norm =
       max(iteration_summary.gradient_max_norm, kEpsilon);
   const double absolute_gradient_tolerance =
-      options_.gradient_tolerance * gradient_max_norm_0;
+      options_.gradient_tolerance * initial_gradient_max_norm;
 
   if (iteration_summary.gradient_max_norm <= absolute_gradient_tolerance) {
     summary->termination_type = GRADIENT_TOLERANCE;
     VLOG(1) << "Terminating: Gradient tolerance reached."
             << "Relative gradient max norm: "
-            << iteration_summary.gradient_max_norm / gradient_max_norm_0
+            << iteration_summary.gradient_max_norm / initial_gradient_max_norm
             << " <= " << options_.gradient_tolerance;
     return;
   }
@@ -203,24 +188,12 @@
       + summary->preprocessor_time_in_seconds;
   summary->iterations.push_back(iteration_summary);
 
-  // Call the various callbacks.
-  switch (RunCallbacks(iteration_summary)) {
-    case SOLVER_TERMINATE_SUCCESSFULLY:
-      summary->termination_type = USER_SUCCESS;
-      VLOG(1) << "Terminating: User callback returned USER_SUCCESS.";
-      return;
-    case SOLVER_ABORT:
-      summary->termination_type = USER_ABORT;
-      VLOG(1) << "Terminating: User callback returned  USER_ABORT.";
-      return;
-    case SOLVER_CONTINUE:
-      break;
-    default:
-      LOG(FATAL) << "Unknown type of user callback status";
-  }
-
-  int num_consecutive_invalid_steps = 0;
+  int num_consecutive_invalid_steps = 0;
   while (true) {
+    if (!RunCallbacks(options_.callbacks, iteration_summary, summary)) {
+      return;
+    }
+
     iteration_start_time = WallTimeInSeconds();
     if (iteration_summary.iteration >= options_.max_num_iterations) {
       summary->termination_type = NO_CONVERGENCE;
@@ -356,7 +329,7 @@
             new_cost = x_plus_delta_cost;
           } else {
             x_plus_delta = inner_iteration_x;
-            // Bost the model_cost_change, since the inner iteration
+            // Boost the model_cost_change, since the inner iteration
             // improvements are not accounted for by the trust region.
             model_cost_change +=  x_plus_delta_cost - new_cost;
             VLOG(2) << "Inner iteration succeeded; current cost: " << cost
@@ -461,7 +434,7 @@
         summary->termination_type = GRADIENT_TOLERANCE;
         VLOG(1) << "Terminating: Gradient tolerance reached."
                 << "Relative gradient max norm: "
-                << iteration_summary.gradient_max_norm / gradient_max_norm_0
+                << iteration_summary.gradient_max_norm / initial_gradient_max_norm
                 << " <= " << options_.gradient_tolerance;
         return;
       }
@@ -534,21 +507,6 @@
         WallTimeInSeconds() - start_time
         + summary->preprocessor_time_in_seconds;
     summary->iterations.push_back(iteration_summary);
-
-    switch (RunCallbacks(iteration_summary)) {
-      case SOLVER_TERMINATE_SUCCESSFULLY:
-        summary->termination_type = USER_SUCCESS;
-        VLOG(1) << "Terminating: User callback returned USER_SUCCESS.";
-        return;
-      case SOLVER_ABORT:
-        summary->termination_type = USER_ABORT;
-        VLOG(1) << "Terminating: User callback returned  USER_ABORT.";
-        return;
-      case SOLVER_CONTINUE:
-        break;
-      default:
-        LOG(FATAL) << "Unknown type of user callback status";
-    }
   }
 }
 
diff --git a/internal/ceres/trust_region_minimizer.h b/internal/ceres/trust_region_minimizer.h
index 49f5733..173b0f3 100644
--- a/internal/ceres/trust_region_minimizer.h
+++ b/internal/ceres/trust_region_minimizer.h
@@ -52,7 +52,6 @@
  private:
   void Init(const Minimizer::Options& options);
   void EstimateScale(const SparseMatrix& jacobian, double* scale) const;
-  CallbackReturnType RunCallbacks(const IterationSummary& iteration_summary);
   bool MaybeDumpLinearLeastSquaresProblem( const int iteration,
                                            const SparseMatrix* jacobian,
                                            const double* residuals,