Use the evaluator to compute the gradient in TrustRegionMinimizer.

The program evaluator now uses the custom BLAS routine
MatrixTransposeVectorMultiply (from ceres/blas.h) to accumulate the
gradient, replacing the previous Eigen map-and-multiply expression.
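
For reference, a minimal Eigen sketch of the semantics assumed here:
MatrixTransposeVectorMultiply<Eigen::Dynamic, Eigen::Dynamic, 1> is
taken to accumulate c += A^T b, with the third template argument
(kOperation = 1) selecting the accumulating variant and block
Jacobians stored row-major. The reference helper below is
hypothetical, not part of this patch:

    #include "Eigen/Core"

    // Assumed semantics of
    //   MatrixTransposeVectorMultiply<Eigen::Dynamic, Eigen::Dynamic, 1>(
    //       A, num_rows, num_cols, b, c);
    // i.e. accumulate c += A^T b for a row-major num_rows x num_cols
    // Jacobian block A, residual block b, and gradient block c.
    void MatrixTransposeVectorMultiplyReference(const double* A,
                                                const int num_rows,
                                                const int num_cols,
                                                const double* b,
                                                double* c) {
      typedef Eigen::Matrix<double,
                            Eigen::Dynamic,
                            Eigen::Dynamic,
                            Eigen::RowMajor> RowMajorMatrix;
      Eigen::Map<const RowMajorMatrix> block_jacobian(A, num_rows, num_cols);
      Eigen::Map<const Eigen::VectorXd> block_residuals(b, num_rows);
      Eigen::Map<Eigen::VectorXd> block_gradient(c, num_cols);
      block_gradient += block_jacobian.transpose() * block_residuals;
    }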

Update the evaluator in trust_region_minimizer_test to also compute
the analytic gradient.
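
For the test, the gradient of the cost 0.5 * ||f(x)||^2 is
g = J(x)^T f(x), i.e. g_j = sum_i f_i * df_i/dx_j per column. A
standalone sketch of the expressions added to the test evaluator,
with a hypothetical helper name and all four columns assumed active:

    #include <cmath>

    // Hypothetical standalone version of the analytic gradient
    // g = J^T f for the Powell test function, matching the
    // per-column expressions added to the test evaluator below.
    void PowellGradient(const double* x, double* g) {
      const double f1 = x[0] + 10.0 * x[1];
      const double f2 = std::sqrt(5.0) * (x[2] - x[3]);
      const double f3 = std::pow(x[1] - 2.0 * x[2], 2.0);
      const double f4 = std::sqrt(10.0) * std::pow(x[0] - x[3], 2.0);

      g[0] = f1 + f4 * std::sqrt(10.0) * 2.0 * (x[0] - x[3]);
      g[1] = f1 * 10.0 + f3 * 2.0 * (x[1] - 2.0 * x[2]);
      g[2] = f2 * std::sqrt(5.0) + f3 * 4.0 * (2.0 * x[2] - x[1]);
      g[3] = -f2 * std::sqrt(5.0) + f4 * std::sqrt(10.0) * 2.0 * (x[3] - x[0]);
    }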

Change-Id: I3f565bc203b47b2b795a0609d67f25775648653c
diff --git a/internal/ceres/program_evaluator.h b/internal/ceres/program_evaluator.h
index de56ac2..1f7fc0f 100644
--- a/internal/ceres/program_evaluator.h
+++ b/internal/ceres/program_evaluator.h
@@ -85,6 +85,7 @@
 
 #include <map>
 #include <vector>
+#include "ceres/blas.h"
 #include "ceres/execution_summary.h"
 #include "ceres/internal/eigen.h"
 #include "ceres/internal/scoped_ptr.h"
@@ -230,14 +231,13 @@
           if (parameter_block->IsConstant()) {
             continue;
           }
-          MatrixRef block_jacobian(block_jacobians[j],
-                                   num_residuals,
-                                   parameter_block->LocalSize());
-          VectorRef block_gradient(scratch->gradient.get() +
-                                   parameter_block->delta_offset(),
-                                   parameter_block->LocalSize());
-          VectorRef block_residual(block_residuals, num_residuals);
-          block_gradient += block_residual.transpose() * block_jacobian;
+
+          MatrixTransposeVectorMultiply<Eigen::Dynamic, Eigen::Dynamic, 1>(
+              block_jacobians[j],
+              num_residuals,
+              parameter_block->LocalSize(),
+              block_residuals,
+              scratch->gradient.get() + parameter_block->delta_offset());
         }
       }
     }
diff --git a/internal/ceres/trust_region_minimizer.cc b/internal/ceres/trust_region_minimizer.cc
index 981c60a..7d4c093 100644
--- a/internal/ceres/trust_region_minimizer.cc
+++ b/internal/ceres/trust_region_minimizer.cc
@@ -139,15 +139,16 @@
 
   // Do initial cost and Jacobian evaluation.
   double cost = 0.0;
-  if (!evaluator->Evaluate(x.data(), &cost, residuals.data(), NULL, jacobian)) {
+  if (!evaluator->Evaluate(x.data(),
+                           &cost,
+                           residuals.data(),
+                           gradient.data(),
+                           jacobian)) {
     LOG(WARNING) << "Terminating: Residual and Jacobian evaluation failed.";
     summary->termination_type = NUMERICAL_FAILURE;
     return;
   }
 
-  summary->initial_cost = cost + summary->fixed_cost;
-  iteration_summary.cost = cost + summary->fixed_cost;
-
   int num_consecutive_nonmonotonic_steps = 0;
   double minimum_cost = cost;
   double reference_cost = cost;
@@ -155,17 +156,10 @@
   double candidate_cost = cost;
   double accumulated_candidate_model_cost_change = 0.0;
 
-  gradient.setZero();
-  jacobian->LeftMultiply(residuals.data(), gradient.data());
+  summary->initial_cost = cost + summary->fixed_cost;
+  iteration_summary.cost = cost + summary->fixed_cost;
   iteration_summary.gradient_max_norm = gradient.lpNorm<Eigen::Infinity>();
 
-  if (options_.jacobi_scaling) {
-    EstimateScale(*jacobian, scale.data());
-    jacobian->ScaleColumns(scale.data());
-  } else {
-    scale.setOnes();
-  }
-
   // The initial gradient max_norm is bounded from below so that we do
   // not divide by zero.
   const double initial_gradient_max_norm =
@@ -189,6 +183,13 @@
       + summary->preprocessor_time_in_seconds;
   summary->iterations.push_back(iteration_summary);
 
+  if (options_.jacobi_scaling) {
+    EstimateScale(*jacobian, scale.data());
+    jacobian->ScaleColumns(scale.data());
+  } else {
+    scale.setOnes();
+  }
+
   int num_consecutive_invalid_steps = 0;
   while (true) {
     if (!RunCallbacks(options.callbacks, iteration_summary, summary)) {
@@ -419,7 +420,7 @@
       if (!evaluator->Evaluate(x.data(),
                                &cost,
                                residuals.data(),
-                               NULL,
+                               gradient.data(),
                                jacobian)) {
         summary->termination_type = NUMERICAL_FAILURE;
         summary->error =
@@ -428,8 +429,6 @@
         return;
       }
 
-      gradient.setZero();
-      jacobian->LeftMultiply(residuals.data(), gradient.data());
       iteration_summary.gradient_max_norm = gradient.lpNorm<Eigen::Infinity>();
 
       if (iteration_summary.gradient_max_norm <= absolute_gradient_tolerance) {
diff --git a/internal/ceres/trust_region_minimizer_test.cc b/internal/ceres/trust_region_minimizer_test.cc
index 52d682d..ed00ab2 100644
--- a/internal/ceres/trust_region_minimizer_test.cc
+++ b/internal/ceres/trust_region_minimizer_test.cc
@@ -86,12 +86,12 @@
                         const double* state,
                         double* cost,
                         double* residuals,
-                        double* /* gradient */,
+                        double* gradient,
                         SparseMatrix* jacobian) {
-    double x1 = state[0];
-    double x2 = state[1];
-    double x3 = state[2];
-    double x4 = state[3];
+    const double x1 = state[0];
+    const double x2 = state[1];
+    const double x3 = state[2];
+    const double x4 = state[3];
 
     VLOG(1) << "State: "
             << "x1=" << x1 << ", "
@@ -99,10 +99,10 @@
             << "x3=" << x3 << ", "
             << "x4=" << x4 << ".";
 
-    double f1 = x1 + 10.0 * x2;
-    double f2 = sqrt(5.0) * (x3 - x4);
-    double f3 = pow(x2 - 2.0 * x3, 2.0);
-    double f4 = sqrt(10.0) * pow(x1 - x4, 2.0);
+    const double f1 = x1 + 10.0 * x2;
+    const double f2 = sqrt(5.0) * (x3 - x4);
+    const double f3 = pow(x2 - 2.0 * x3, 2.0);
+    const double f4 = sqrt(10.0) * pow(x1 - x4, 2.0);
 
     VLOG(1) << "Function: "
             << "f1=" << f1 << ", "
@@ -162,6 +162,28 @@
       }
       VLOG(1) << "\n" << jacobian_matrix;
     }
+
+    if (gradient != NULL) {
+      int column_index = 0;
+      if (col1) {
+        gradient[column_index++] = f1 + f4 * sqrt(10.0) * 2.0 * (x1 - x4);
+      }
+
+      if (col2) {
+        gradient[column_index++] = f1 * 10.0 + f3 * 2.0 * (x2 - 2.0 * x3);
+      }
+
+      if (col3) {
+        gradient[column_index++] =
+            f2 * sqrt(5.0) + f3 * (2.0 * 2.0 * (2.0 * x3 - x2));
+      }
+
+      if (col4) {
+        gradient[column_index++] =
+            -f2 * sqrt(5.0) + f4 * sqrt(10.0) * 2.0 * (x4 - x1);
+      }
+    }
+
     return true;
   }