Evaluation callback API

This adds a callback mechanism for users to get notified just
before Jacobian and residual evaluations. This enables
aggressive caching and sharing of compute between cost functions.
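
As a sketch of the intended usage (hypothetical user code; the class
name, the shared_value member, and the ExpensiveSharedComputation()
helper below are only illustrative, not part of this change), a
callback that refreshes state shared by several cost functions once
per evaluation point could look like:

  struct SharedStateCallback : public ceres::EvaluationCallback {
    virtual ~SharedStateCallback() {}
    virtual void PrepareForEvaluation(bool evaluate_jacobians,
                                      bool new_evaluation_point) {
      (void) evaluate_jacobians;
      if (new_evaluation_point) {
        // The user parameter blocks already hold the new state here,
        // so values needed by several cost functions can be computed
        // once and reused by every Evaluate() call that follows.
        shared_value = ExpensiveSharedComputation();
      }
    }
    double shared_value;
  };

  SharedStateCallback callback;
  ceres::Solver::Options options;
  options.evaluation_callback = &callback;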

Change-Id: I67993726920218edf71ab9ae70c34c204756c71a
diff --git a/internal/ceres/CMakeLists.txt b/internal/ceres/CMakeLists.txt
index 55cd0eb..cc43fec 100644
--- a/internal/ceres/CMakeLists.txt
+++ b/internal/ceres/CMakeLists.txt
@@ -325,6 +325,7 @@
   ceres_test(dynamic_numeric_diff_cost_function)
   ceres_test(dynamic_sparse_normal_cholesky_solver)
   ceres_test(dynamic_sparsity)
+  ceres_test(evaluation_callback)
   ceres_test(evaluator)
   ceres_test(gradient_checker)
   ceres_test(gradient_checking_cost_function)
diff --git a/internal/ceres/evaluation_callback_test.cc b/internal/ceres/evaluation_callback_test.cc
new file mode 100644
index 0000000..57e09f0
--- /dev/null
+++ b/internal/ceres/evaluation_callback_test.cc
@@ -0,0 +1,309 @@
+// Ceres Solver - A fast non-linear least squares minimizer
+// Copyright 2018 Google Inc. All rights reserved.
+// http://ceres-solver.org/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+//   this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+//   this list of conditions and the following disclaimer in the documentation
+//   and/or other materials provided with the distribution.
+// * Neither the name of Google Inc. nor the names of its contributors may be
+//   used to endorse or promote products derived from this software without
+//   specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mierle@gmail.com (Keir Mierle)
+
+#include "ceres/solver.h"
+
+#include <limits>
+#include <cmath>
+#include <vector>
+#include "gtest/gtest.h"
+#include "ceres/internal/scoped_ptr.h"
+#include "ceres/sized_cost_function.h"
+#include "ceres/problem.h"
+#include "ceres/problem_impl.h"
+
+namespace ceres {
+namespace internal {
+
+// Use an inline hash function to avoid portability wrangling. Algorithm from
+// Daniel Bernstein, known as the "djb2" hash.
+template<typename T>
+uint64_t Djb2Hash(const T* data, const int size) {
+  uint64_t hash = 5381;
+  const uint8_t* data_as_bytes = reinterpret_cast<const uint8_t*>(data);
+  for (int i = 0; i < sizeof(*data) * size; ++i) {
+    hash = hash * 33 + data_as_bytes[i];
+  }
+  return hash;
+}
+
+const double kUninitialized = 0;
+
+// Generally multiple inheritance is a terrible idea, but in this (test)
+// case it makes for a relatively elegant test implementation.
+struct WigglyBowlCostFunctionAndEvaluationCallback :
+      SizedCostFunction<2, 2>,
+      EvaluationCallback  {
+
+  explicit WigglyBowlCostFunctionAndEvaluationCallback(double *parameter)
+      : EvaluationCallback(),
+        user_parameter_block(parameter),
+        prepare_num_calls(0),
+        evaluate_num_calls(0),
+        evaluate_last_parameter_hash(kUninitialized) {}
+
+  virtual ~WigglyBowlCostFunctionAndEvaluationCallback() {}
+
+  // Evaluation callback interface. This checks that all the preconditions are
+  // met at the point that Ceres calls into it.
+  virtual void PrepareForEvaluation(bool evaluate_jacobians,
+                                    bool new_evaluation_point) {
+    // At this point, the incoming parameters have already been pushed by
+    // Ceres into the user parameter blocks, unlike in Evaluate().
+    uint64_t incoming_parameter_hash = Djb2Hash(user_parameter_block, 2);
+
+    // Check: PrepareForEvaluation() & Evaluate() come in pairs, in that
+    // order, so before this call the two call counts should be equal.
+    EXPECT_EQ(prepare_num_calls, evaluate_num_calls);
+
+    // Check: new_evaluation_point indicates that the parameter has changed.
+    if (new_evaluation_point) {
+      // If it's a new evaluation point, then the parameter should have
+      // changed. Technically, it's not required that it must change but
+      // in practice it does, and that helps with testing.
+      EXPECT_NE(evaluate_last_parameter_hash, incoming_parameter_hash);
+      EXPECT_NE(prepare_parameter_hash, incoming_parameter_hash);
+    } else {
+      // If this is the same evaluation point as last time, ensure that
+      // the parameters match the previous Evaluate(), the previous
+      // PrepareForEvaluation(), and the current PrepareForEvaluation().
+      EXPECT_EQ(evaluate_last_parameter_hash, prepare_parameter_hash);
+      EXPECT_EQ(evaluate_last_parameter_hash, incoming_parameter_hash);
+    }
+
+    // Save details to check at the next call to Evaluate().
+    prepare_num_calls++;
+    prepare_requested_jacobians = evaluate_jacobians;
+    prepare_new_evaluation_point = new_evaluation_point;
+    prepare_parameter_hash = incoming_parameter_hash;
+  }
+
+  // Cost function interface. This checks that preconditions that were
+  // set as part of the PrepareForEvaluation() call are met in this one.
+  virtual bool Evaluate(double const* const* parameters,
+                        double* residuals,
+                        double** jacobians) const {
+    // Cost function implementation of the "Wiggly Bowl" function:
+    //
+    //   1/2 * [(y - a*sin(x))^2 + x^2],
+    //
+    // expressed as a Ceres cost function with two residuals:
+    //
+    //   r[0] = y - a*sin(x)
+    //   r[1] = x.
+    //
+    // This is harder to optimize than the Rosenbrock function because the
+    // minimizer has to navigate a sine-shaped valley while descending the 1D
+    // parabola formed along the y axis. Note that the "a" needs to be more
+    // than 5 to get a strong enough wiggle effect in the cost surface to
+    // trigger failed iterations in the optimizer.
+    const double a = 10.0;
+    double x = (*parameters)[0];
+    double y = (*parameters)[1];
+    residuals[0] = y - a * sin(x);
+    residuals[1] = x;
+    if (jacobians != NULL) {
+      (*jacobians)[2 * 0 + 0] = - a * cos(x);  // df1/dx
+      (*jacobians)[2 * 0 + 1] = 1.0;           // df1/dy
+      (*jacobians)[2 * 1 + 0] = 1.0;           // df2/dx
+      (*jacobians)[2 * 1 + 1] = 0.0;           // df2/dy
+    }
+
+    uint64_t incoming_parameter_hash = Djb2Hash(*parameters, 2);
+
+    // Check: PrepareForEvaluation() & Evaluate() come in pairs, in that order.
+    EXPECT_EQ(prepare_num_calls, evaluate_num_calls + 1);
+
+    // Check: if new_evaluation_point indicates that the parameter has
+    // changed, it has changed; otherwise it is the same.
+    if (prepare_new_evaluation_point) {
+      EXPECT_NE(evaluate_last_parameter_hash, incoming_parameter_hash);
+    } else {
+      EXPECT_NE(evaluate_last_parameter_hash, kUninitialized);
+      EXPECT_EQ(evaluate_last_parameter_hash, incoming_parameter_hash);
+    }
+
+    // Check: parameter matches value in parameter blocks during prepare.
+    EXPECT_EQ(prepare_parameter_hash, incoming_parameter_hash);
+
+    // Check: jacobians are requested iff they were in PrepareForEvaluation().
+    EXPECT_EQ(prepare_requested_jacobians, jacobians != NULL);
+
+    evaluate_num_calls++;
+    evaluate_last_parameter_hash = incoming_parameter_hash;
+    return true;
+  }
+
+  // Pointer to the parameter block associated with this cost function.
+  // Contents should get set by Ceres before calls to PrepareForEvaluation()
+  // and Evaluate().
+  double* user_parameter_block;
+
+  // Track state: PrepareForEvaluation().
+  //
+  // These track details from the PrepareForEvaluation() call (hence the
+  // "prepare_" prefix), which are checked for consistency in Evaluate().
+  int prepare_num_calls;
+  bool prepare_requested_jacobians;
+  bool prepare_new_evaluation_point;
+  uint64_t prepare_parameter_hash;
+
+  // Track state: Evaluate().
+  //
+  // These track details from the Evaluate() call (hence the "evaluate_"
+  // prefix), which are then checked for consistency in the calls to
+  // PrepareForEvaluation(). Mutable is reasonable for this case.
+  mutable int evaluate_num_calls;
+  mutable uint64_t evaluate_last_parameter_hash;
+};
+
+TEST(EvaluationCallback, WithTrustRegionMinimizer) {
+  double parameters[2] = {50.0, 50.0};
+  const uint64_t original_parameters_hash = Djb2Hash(parameters, 2);
+
+  WigglyBowlCostFunctionAndEvaluationCallback cost_function(parameters);
+  Problem::Options problem_options;
+  problem_options.cost_function_ownership = DO_NOT_TAKE_OWNERSHIP;
+  Problem problem(problem_options);
+  problem.AddResidualBlock(&cost_function, NULL, parameters);
+
+  Solver::Options options;
+  options.linear_solver_type = DENSE_QR;
+  options.max_num_iterations = 300;  // Cost function is hard.
+  options.evaluation_callback = &cost_function;
+
+  // Run the solve. Checking is done inside the cost function / callback.
+  Solver::Summary summary;
+  Solve(options, &problem, &summary);
+
+  // Ensure that this was a hard cost function (not all steps succeed).
+  EXPECT_GT(summary.num_successful_steps, 10);
+  EXPECT_GT(summary.num_unsuccessful_steps, 10);
+
+  // Ensure PrepareForEvaluation() is called the appropriate number of times.
+  EXPECT_EQ(cost_function.prepare_num_calls,
+            // Unsuccessful steps are evaluated only once (no jacobians).
+            summary.num_unsuccessful_steps +
+            // Successful steps are evaluated twice: with and without jacobians.
+            2 * summary.num_successful_steps
+            // Final iteration doesn't re-evaluate the jacobian.
+            // Note: This may be sensitive to tweaks to the TR algorithm; if
+            // this becomes too brittle, remove this EXPECT_EQ() entirely.
+            - 1);
+
+  // Ensure the callback calls ran a reasonable number of times.
+  EXPECT_GT(cost_function.prepare_num_calls, 0);
+  EXPECT_GT(cost_function.evaluate_num_calls, 0);
+  EXPECT_EQ(cost_function.prepare_num_calls,
+            cost_function.evaluate_num_calls);
+
+  // Ensure that the parameters did actually change.
+  EXPECT_NE(Djb2Hash(parameters, 2), original_parameters_hash);
+}
+
+void WithLineSearchMinimizerImpl(
+    LineSearchType line_search,
+    LineSearchDirectionType line_search_direction,
+    LineSearchInterpolationType line_search_interpolation) {
+  double parameters[2] = {50.0, 50.0};
+  const uint64_t original_parameters_hash = Djb2Hash(parameters, 2);
+
+  WigglyBowlCostFunctionAndEvaluationCallback cost_function(parameters);
+  Problem::Options problem_options;
+  problem_options.cost_function_ownership = DO_NOT_TAKE_OWNERSHIP;
+  Problem problem(problem_options);
+  problem.AddResidualBlock(&cost_function, NULL, parameters);
+
+  Solver::Options options;
+  options.linear_solver_type = DENSE_QR;
+  options.max_num_iterations = 300;  // Cost function is hard.
+  options.minimizer_type = ceres::LINE_SEARCH;
+  options.evaluation_callback = &cost_function;
+  options.line_search_type = line_search;
+  options.line_search_direction_type = line_search_direction;
+  options.line_search_interpolation_type = line_search_interpolation;
+
+  // Run the solve. Checking is done inside the cost function / callback.
+  Solver::Summary summary;
+  Solve(options, &problem, &summary);
+
+  // Ensure the callback calls ran a reasonable number of times.
+  EXPECT_GT(summary.num_line_search_steps, 10);
+  EXPECT_GT(cost_function.prepare_num_calls, 30);
+  EXPECT_EQ(cost_function.prepare_num_calls,
+            cost_function.evaluate_num_calls);
+
+  // Ensure that the parameters did actually change.
+  EXPECT_NE(Djb2Hash(parameters, 2), original_parameters_hash);
+}
+
+// Note: These tests omit combinations of Wolfe line search with bisection.
+// Due to an implementation quirk in Wolfe line search with bisection, there
+// are calls to re-evaluate an existing point with new_point = true. That
+// causes the (overly) strict tests to break, since they check the new_point
+// preconditions in an if-and-only-if way. Strictly speaking, if new_point =
+// true, the interface does not *require* that the point has changed; only that
+// if new_point = false, the same point is reused.
+//
+// Since the strict checking is useful to verify that there aren't missed
+// optimizations, omit tests of the Wolfe with bisection cases.
+
+// Wolfe with L-BFGS.
+TEST(EvaluationCallback, WithLineSearchMinimizerWolfeLbfgsCubic) {
+  WithLineSearchMinimizerImpl(WOLFE, LBFGS, CUBIC);
+}
+TEST(EvaluationCallback, WithLineSearchMinimizerWolfeLbfgsQuadratic) {
+  WithLineSearchMinimizerImpl(WOLFE, LBFGS, QUADRATIC);
+}
+
+// Wolfe with full BFGS.
+TEST(EvaluationCallback, WithLineSearchMinimizerWolfeBfgsCubic) {
+  WithLineSearchMinimizerImpl(WOLFE, BFGS, CUBIC);
+}
+
+TEST(EvaluationCallback, WithLineSearchMinimizerWolfeBfgsQuadratic) {
+  WithLineSearchMinimizerImpl(WOLFE, BFGS, QUADRATIC);
+}
+
+// Armijo with nonlinear conjugate gradient.
+TEST(EvaluationCallback, WithLineSearchMinimizerArmijoCubic) {
+  WithLineSearchMinimizerImpl(ARMIJO, NONLINEAR_CONJUGATE_GRADIENT, CUBIC);
+}
+
+TEST(EvaluationCallback, WithLineSearchMinimizerArmijoBisection) {
+  WithLineSearchMinimizerImpl(ARMIJO, NONLINEAR_CONJUGATE_GRADIENT, BISECTION);
+}
+
+TEST(EvaluationCallback, WithLineSearchMinimizerArmijoQuadratic) {
+  WithLineSearchMinimizerImpl(ARMIJO, NONLINEAR_CONJUGATE_GRADIENT, QUADRATIC);
+}
+
+}  // namespace internal
+}  // namespace ceres
diff --git a/internal/ceres/evaluator.h b/internal/ceres/evaluator.h
index cbb7aa2..532f437 100644
--- a/internal/ceres/evaluator.h
+++ b/internal/ceres/evaluator.h
@@ -44,6 +44,7 @@
 namespace ceres {
 
 struct CRSMatrix;
+class EvaluationCallback;
 
 namespace internal {
 
@@ -64,13 +65,15 @@
           num_eliminate_blocks(-1),
           linear_solver_type(DENSE_QR),
           dynamic_sparsity(false),
-          context(NULL) {}
+          context(NULL),
+          evaluation_callback(NULL) {}
 
     int num_threads;
     int num_eliminate_blocks;
     LinearSolverType linear_solver_type;
     bool dynamic_sparsity;
     ContextImpl* context;
+    EvaluationCallback* evaluation_callback;
   };
 
   static Evaluator* Create(const Options& options,
@@ -98,12 +101,16 @@
   // Options struct to control Evaluator::Evaluate;
   struct EvaluateOptions {
     EvaluateOptions()
-        : apply_loss_function(true) {
+        : apply_loss_function(true),
+          new_evaluation_point(true) {
     }
 
     // If false, the loss function correction is not applied to the
     // residual blocks.
     bool apply_loss_function;
+
+    // If false, this evaluation point is the same as the last one.
+    bool new_evaluation_point;
   };
 
   // Evaluate the cost function for the given state. Returns the cost,
diff --git a/internal/ceres/line_search_minimizer.cc b/internal/ceres/line_search_minimizer.cc
index cc2e3c3..5b5b837 100644
--- a/internal/ceres/line_search_minimizer.cc
+++ b/internal/ceres/line_search_minimizer.cc
@@ -332,7 +332,10 @@
       current_state.cost = optimal_point.value;
       current_state.gradient = optimal_point.vector_gradient;
     } else {
-      if (!evaluator->Evaluate(optimal_point.vector_x.data(),
+      Evaluator::EvaluateOptions evaluate_options;
+      evaluate_options.new_evaluation_point = false;
+      if (!evaluator->Evaluate(evaluate_options,
+                               optimal_point.vector_x.data(),
                                &(current_state.cost),
                                NULL,
                                current_state.gradient.data(),
diff --git a/internal/ceres/line_search_preprocessor.cc b/internal/ceres/line_search_preprocessor.cc
index 7ddb280..72c1dd8 100644
--- a/internal/ceres/line_search_preprocessor.cc
+++ b/internal/ceres/line_search_preprocessor.cc
@@ -60,6 +60,7 @@
   pp->evaluator_options.num_eliminate_blocks = 0;
   pp->evaluator_options.num_threads = pp->options.num_threads;
   pp->evaluator_options.context = pp->problem->context();
+  pp->evaluator_options.evaluation_callback = pp->options.evaluation_callback;
   pp->evaluator.reset(Evaluator::Create(pp->evaluator_options,
                                         pp->reduced_program.get(),
                                         &pp->error));
diff --git a/internal/ceres/program_evaluator.h b/internal/ceres/program_evaluator.h
index 5986d81..10dcc06 100644
--- a/internal/ceres/program_evaluator.h
+++ b/internal/ceres/program_evaluator.h
@@ -85,6 +85,7 @@
 #include <map>
 #include <string>
 #include <vector>
+#include "ceres/evaluation_callback.h"
 #include "ceres/execution_summary.h"
 #include "ceres/internal/eigen.h"
 #include "ceres/internal/scoped_ptr.h"
@@ -156,6 +157,14 @@
       return false;
     }
 
+    // Notify the user about a new evaluation point if they are interested.
+    if (options_.evaluation_callback != NULL) {
+      program_->CopyParameterBlockStateToUserState();
+      options_.evaluation_callback->PrepareForEvaluation(
+          /*jacobians=*/(gradient != NULL || jacobian != NULL),
+          evaluate_options.new_evaluation_point);
+    }
+
     if (residuals != NULL) {
       VectorRef(residuals, program_->NumResiduals()).setZero();
     }
diff --git a/internal/ceres/solver.cc b/internal/ceres/solver.cc
index 3de9c69..4083c59 100644
--- a/internal/ceres/solver.cc
+++ b/internal/ceres/solver.cc
@@ -124,6 +124,14 @@
     OPTION_GE(inner_iteration_tolerance, 0.0);
   }
 
+  if (options.use_inner_iterations &&
+      options.evaluation_callback != NULL) {
+    *error =  "Inner iterations (use_inner_iterations = true) can't be "
+        "combined with an evaluation callback "
+        "(options.evaluation_callback != NULL).";
+    return false;
+  }
+
   if (options.use_nonmonotonic_steps) {
     OPTION_GT(max_consecutive_nonmonotonic_steps, 0);
   }
@@ -140,7 +148,7 @@
       options.sparse_linear_algebra_library_type != SUITE_SPARSE) {
     *error =  "CLUSTER_JACOBI requires "
         "Solver::Options::sparse_linear_algebra_library_type to be "
-        "SUITE_SPARSE";
+        "SUITE_SPARSE.";
     return false;
   }
 
@@ -148,7 +156,7 @@
       options.sparse_linear_algebra_library_type != SUITE_SPARSE) {
     *error =  "CLUSTER_TRIDIAGONAL requires "
         "Solver::Options::sparse_linear_algebra_library_type to be "
-        "SUITE_SPARSE";
+        "SUITE_SPARSE.";
     return false;
   }
 
diff --git a/internal/ceres/solver_test.cc b/internal/ceres/solver_test.cc
index 5aa4a94..80f12e4 100644
--- a/internal/ceres/solver_test.cc
+++ b/internal/ceres/solver_test.cc
@@ -34,6 +34,7 @@
 #include <cmath>
 #include <vector>
 #include "gtest/gtest.h"
+#include "ceres/evaluation_callback.h"
 #include "ceres/internal/scoped_ptr.h"
 #include "ceres/autodiff_cost_function.h"
 #include "ceres/sized_cost_function.h"
@@ -84,6 +85,15 @@
   std::vector<double> x_values;
 };
 
+struct NoOpEvaluationCallback : EvaluationCallback {
+  virtual ~NoOpEvaluationCallback() {}
+  virtual void PrepareForEvaluation(bool evaluate_jacobians,
+                                    bool new_evaluation_point) {
+    (void) evaluate_jacobians;
+    (void) new_evaluation_point;
+  }
+};
+
 TEST(Solver, UpdateStateEveryIterationOption) {
   double x = 50.0;
   const double original_x = x;
@@ -104,7 +114,14 @@
 
   int num_iterations;
 
-  // First try: no updating.
+  // There are four cases that need to be checked:
+  //
+  //   (update_state_every_iteration = true|false) X
+  //   (evaluation_callback = NULL|provided)
+  //
+  // All four need checking because the two options interact.
+
+  // First: update_state_every_iteration=false, evaluation_callback=NULL.
   Solve(options, &problem, &summary);
   num_iterations = summary.num_successful_steps +
                    summary.num_unsuccessful_steps;
@@ -113,7 +130,7 @@
     EXPECT_EQ(50.0, callback.x_values[i]);
   }
 
-  // Second try: with updating
+  // Second: update_state_every_iteration=true, evaluation_callback=NULL.
   x = 50.0;
   options.update_state_every_iteration = true;
   callback.x_values.clear();
@@ -123,6 +140,32 @@
   EXPECT_GT(num_iterations, 1);
   EXPECT_EQ(original_x, callback.x_values[0]);
   EXPECT_NE(original_x, callback.x_values[1]);
+
+  NoOpEvaluationCallback evaluation_callback;
+
+  // Third: update_state_every_iteration=true, evaluation_callback=!NULL.
+  x = 50.0;
+  options.update_state_every_iteration = true;
+  options.evaluation_callback = &evaluation_callback;
+  callback.x_values.clear();
+  Solve(options, &problem, &summary);
+  num_iterations = summary.num_successful_steps +
+                   summary.num_unsuccessful_steps;
+  EXPECT_GT(num_iterations, 1);
+  EXPECT_EQ(original_x, callback.x_values[0]);
+  EXPECT_NE(original_x, callback.x_values[1]);
+
+  // Fourth: update_state_every_iteration=false, evaluation_callback=!NULL.
+  x = 50.0;
+  options.update_state_every_iteration = false;
+  options.evaluation_callback = &evaluation_callback;
+  callback.x_values.clear();
+  Solve(options, &problem, &summary);
+  num_iterations = summary.num_successful_steps +
+                   summary.num_unsuccessful_steps;
+  EXPECT_GT(num_iterations, 1);
+  EXPECT_EQ(original_x, callback.x_values[0]);
+  EXPECT_NE(original_x, callback.x_values[1]);
 }
 
 // The parameters must be in separate blocks so that they can be individually
@@ -368,6 +411,30 @@
   EXPECT_TRUE(options.IsValid(&message));
 }
 
+TEST(Solver, CantMixEvaluationCallbackWithInnerIterations) {
+  Solver::Options options;
+  NoOpEvaluationCallback evaluation_callback;
+  string message;
+
+  // Can't combine them.
+  options.use_inner_iterations = true;
+  options.evaluation_callback = &evaluation_callback;
+  EXPECT_FALSE(options.IsValid(&message));
+
+  // Either or none is OK.
+  options.use_inner_iterations = false;
+  options.evaluation_callback = &evaluation_callback;
+  EXPECT_TRUE(options.IsValid(&message));
+
+  options.use_inner_iterations = true;
+  options.evaluation_callback = NULL;
+  EXPECT_TRUE(options.IsValid(&message));
+
+  options.use_inner_iterations = false;
+  options.evaluation_callback = NULL;
+  EXPECT_TRUE(options.IsValid(&message));
+}
+
 template<int kNumResiduals, int N1 = 0, int N2 = 0, int N3 = 0>
 class DummyCostFunction : public SizedCostFunction<kNumResiduals, N1, N2, N3> {
  public:
diff --git a/internal/ceres/trust_region_minimizer.cc b/internal/ceres/trust_region_minimizer.cc
index 3a779c6..a863e63 100644
--- a/internal/ceres/trust_region_minimizer.cc
+++ b/internal/ceres/trust_region_minimizer.cc
@@ -201,7 +201,7 @@
     x_norm_ = x_.norm();
   }
 
-  if (!EvaluateGradientAndJacobian()) {
+  if (!EvaluateGradientAndJacobian(/*new_evaluation_point=*/true)) {
     return false;
   }
 
@@ -223,8 +223,12 @@
 // Returns true if all computations could be performed
 // successfully. Any failures are considered fatal and the
 // Solver::Summary is updated to indicate this.
-bool TrustRegionMinimizer::EvaluateGradientAndJacobian() {
-  if (!evaluator_->Evaluate(x_.data(),
+bool TrustRegionMinimizer::EvaluateGradientAndJacobian(
+    bool new_evaluation_point) {
+  Evaluator::EvaluateOptions evaluate_options;
+  evaluate_options.new_evaluation_point = new_evaluation_point;
+  if (!evaluator_->Evaluate(evaluate_options,
+                            x_.data(),
                             &x_cost_,
                             residuals_.data(),
                             gradient_.data(),
@@ -768,7 +772,9 @@
   x_ = candidate_x_;
   x_norm_ = x_.norm();
 
-  if (!EvaluateGradientAndJacobian()) {
+  // Since the step was successful, this point has already had the residual
+  // evaluated (but not the jacobian). So indicate that to the evaluator.
+  if (!EvaluateGradientAndJacobian(/*new_evaluation_point=*/false)) {
     return false;
   }
 
diff --git a/internal/ceres/trust_region_minimizer.h b/internal/ceres/trust_region_minimizer.h
index 43141da..9c5274d 100644
--- a/internal/ceres/trust_region_minimizer.h
+++ b/internal/ceres/trust_region_minimizer.h
@@ -63,7 +63,7 @@
   bool FinalizeIterationAndCheckIfMinimizerCanContinue();
   bool ComputeTrustRegionStep();
 
-  bool EvaluateGradientAndJacobian();
+  bool EvaluateGradientAndJacobian(bool new_evaluation_point);
   void ComputeCandidatePointAndEvaluateCost();
 
   void DoLineSearch(const Vector& x,
diff --git a/internal/ceres/trust_region_preprocessor.cc b/internal/ceres/trust_region_preprocessor.cc
index 4c20aac..96f597c 100644
--- a/internal/ceres/trust_region_preprocessor.cc
+++ b/internal/ceres/trust_region_preprocessor.cc
@@ -252,6 +252,7 @@
   pp->evaluator_options.num_threads = options.num_threads;
   pp->evaluator_options.dynamic_sparsity = options.dynamic_sparsity;
   pp->evaluator_options.context = pp->problem->context();
+  pp->evaluator_options.evaluation_callback = options.evaluation_callback;
   pp->evaluator.reset(Evaluator::Create(pp->evaluator_options,
                                         pp->reduced_program.get(),
                                         &pp->error));