Add GradientProblemSolver::Options::update_state_every_iteration

The non-linear least squares solver had the ability to update the
user's parameters every iteration. Now GradientProblemSolver can
do the same.

Also a few minor Sphinx markup related cleanups to the documentation
which were found in the process of updating Sphinx on my machine
and adding the docs for this feature.

This fixes https://github.com/ceres-solver/ceres-solver/issues/246

Change-Id: Ib6b90ac22be8bfb60b14f25ad52082ba371af164
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 17de116..a1601c7 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -129,7 +129,7 @@
 
 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-html_use_smartypants = True
+#html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
 #html_sidebars = {}
diff --git a/docs/source/gradient_solver.rst b/docs/source/gradient_solver.rst
index 40d22ff..1356e74 100644
--- a/docs/source/gradient_solver.rst
+++ b/docs/source/gradient_solver.rst
@@ -392,12 +392,27 @@
 
    Callbacks that are executed at the end of each iteration of the
    :class:`Minimizer`. They are executed in the order that they are
-   specified in this vector. See the documentation for
-   :class:`IterationCallback` for more details.
+   specified in this vector. By default, parameter blocks are updated
+   only at the end of the optimization, i.e., when the
+   :class:`Minimizer` terminates. This behavior is controlled by
+   :member:`GradientProblemSolver::Options::update_state_every_iteration`. If
+   the user wishes to have access to the updated parameter blocks when
+   his/her callbacks are executed, then set
+   :member:`GradientProblemSolver::Options::update_state_every_iteration`
+   to true.
 
    The solver does NOT take ownership of these pointers.
 
 
+.. member:: bool GradientProblemSolver::Options::update_state_every_iteration
+
+   Default: ``false``
+
+   Normally the parameter vector is only updated when the solver
+   terminates. Setting this to true updates it every iteration. This
+   setting is useful when building an interactive application using
+   Ceres and using an :class:`IterationCallback`.
+
 :class:`GradientProblemSolver::Summary`
 ---------------------------------------
 
diff --git a/docs/source/nnls_modeling.rst b/docs/source/nnls_modeling.rst
index 88df6b8..7fb8fbb 100644
--- a/docs/source/nnls_modeling.rst
+++ b/docs/source/nnls_modeling.rst
@@ -103,11 +103,10 @@
    Compute the residual vector and the Jacobian matrices.
 
    ``parameters`` is an array of arrays of size
-   :member:`CostFunction::parameter_block_sizes_.size()` and
-   ``parameters[i]`` is an array of size
-   ``parameter_block_sizes_[i]`` that contains the
-   :math:`i^{\text{th}}` parameter block that the ``CostFunction``
-   depends on.
+   ``CostFunction::parameter_block_sizes_.size()`` and
+   ``parameters[i]`` is an array of size ``parameter_block_sizes_[i]``
+   that contains the :math:`i^{\text{th}}` parameter block that the
+   ``CostFunction`` depends on.
 
    ``parameters`` is never ``NULL``.
 
@@ -116,7 +115,7 @@
    ``residuals`` is never ``NULL``.
 
    ``jacobians`` is an array of arrays of size
-   :member:`CostFunction::parameter_block_sizes_.size()`.
+   ``CostFunction::parameter_block_sizes_.size()``.
 
    If ``jacobians`` is ``NULL``, the user is only expected to compute
    the residuals.
diff --git a/docs/source/nnls_tutorial.rst b/docs/source/nnls_tutorial.rst
index 5e78a05..3c3fc1d 100644
--- a/docs/source/nnls_tutorial.rst
+++ b/docs/source/nnls_tutorial.rst
@@ -710,7 +710,7 @@
 Since this is a large sparse problem (well large for ``DENSE_QR``
 anyways), one way to solve this problem is to set
 :member:`Solver::Options::linear_solver_type` to
-``SPARSE_NORMAL_CHOLESKY`` and call :member:`Solve`. And while this is
+``SPARSE_NORMAL_CHOLESKY`` and call :func:`Solve`. And while this is
 a reasonable thing to do, bundle adjustment problems have a special
 sparsity structure that can be exploited to solve them much more
 efficiently. Ceres provides three specialized solvers (collectively
diff --git a/include/ceres/gradient_problem_solver.h b/include/ceres/gradient_problem_solver.h
index 31fed84..f9de6fb 100644
--- a/include/ceres/gradient_problem_solver.h
+++ b/include/ceres/gradient_problem_solver.h
@@ -77,6 +77,7 @@
       parameter_tolerance = 1e-8;
       logging_type = PER_MINIMIZER_ITERATION;
       minimizer_progress_to_stdout = false;
+      update_state_every_iteration = false;
     }
 
     // Returns true if the options struct has a valid
@@ -253,6 +254,13 @@
     // is sent to STDOUT.
     bool minimizer_progress_to_stdout;
 
+    // If true, the user's parameter blocks are updated at the end of
+    // every Minimizer iteration, otherwise they are updated when the
+    // Minimizer terminates. This is useful if, for example, the user
+    // wishes to visualize the state of the optimization every
+    // iteration.
+    bool update_state_every_iteration;
+
     // Callbacks that are executed at the end of each iteration of the
     // Minimizer. An iteration may terminate midway, either due to
     // numerical failures or because one of the convergence tests has
diff --git a/internal/ceres/callbacks.cc b/internal/ceres/callbacks.cc
index 50a0ec1..01ada06 100644
--- a/internal/ceres/callbacks.cc
+++ b/internal/ceres/callbacks.cc
@@ -54,6 +54,28 @@
   return SOLVER_CONTINUE;
 }
 
+GradientProblemSolverStateUpdatingCallback::
+    GradientProblemSolverStateUpdatingCallback(
+        int num_parameters,
+        const double* internal_parameters,
+        double* user_parameters)
+    : num_parameters_(num_parameters),
+      internal_parameters_(internal_parameters),
+      user_parameters_(user_parameters) {}
+
+GradientProblemSolverStateUpdatingCallback::
+    ~GradientProblemSolverStateUpdatingCallback() {}
+
+CallbackReturnType GradientProblemSolverStateUpdatingCallback::operator()(
+    const IterationSummary& summary) {
+  if (summary.step_is_successful) {
+    std::copy(internal_parameters_,
+              internal_parameters_ + num_parameters_,
+              user_parameters_);
+  }
+  return SOLVER_CONTINUE;
+}
+
 LoggingCallback::LoggingCallback(const MinimizerType minimizer_type,
                                  const bool log_to_stdout)
     : minimizer_type(minimizer_type),
diff --git a/internal/ceres/callbacks.h b/internal/ceres/callbacks.h
index 33c66df..29a3d32 100644
--- a/internal/ceres/callbacks.h
+++ b/internal/ceres/callbacks.h
@@ -52,6 +52,21 @@
   double* parameters_;
 };
 
+// Callback for updating the externally visible state of the
+// parameters vector for GradientProblemSolver.
+class GradientProblemSolverStateUpdatingCallback : public IterationCallback {
+ public:
+  GradientProblemSolverStateUpdatingCallback(int num_parameters,
+                                             const double* internal_parameters,
+                                             double* user_parameters);
+  virtual ~GradientProblemSolverStateUpdatingCallback();
+  virtual CallbackReturnType operator()(const IterationSummary& summary);
+ private:
+  int num_parameters_;
+  const double* internal_parameters_;
+  double* user_parameters_;
+};
+
 // Callback for logging the state of the minimizer to STDERR or
 // STDOUT depending on the user's preferences and logging level.
 class LoggingCallback : public IterationCallback {
diff --git a/internal/ceres/gradient_problem_solver.cc b/internal/ceres/gradient_problem_solver.cc
index 4add01c..920d735 100644
--- a/internal/ceres/gradient_problem_solver.cc
+++ b/internal/ceres/gradient_problem_solver.cc
@@ -98,12 +98,13 @@
                                   const GradientProblem& problem,
                                   double* parameters_ptr,
                                   GradientProblemSolver::Summary* summary) {
-  using internal::scoped_ptr;
-  using internal::WallTimeInSeconds;
-  using internal::Minimizer;
   using internal::GradientProblemEvaluator;
+  using internal::GradientProblemSolverStateUpdatingCallback;
   using internal::LoggingCallback;
+  using internal::Minimizer;
+  using internal::scoped_ptr;
   using internal::SetSummaryFinalCost;
+  using internal::WallTimeInSeconds;
 
   double start_time = WallTimeInSeconds();
 
@@ -122,6 +123,10 @@
     return;
   }
 
+  VectorRef parameters(parameters_ptr, problem.NumParameters());
+  Vector solution(problem.NumParameters());
+  solution = parameters;
+
   // TODO(sameeragarwal): This is a bit convoluted, we should be able
   // to convert to minimizer options directly, but this will do for
   // now.
@@ -137,10 +142,16 @@
                                        logging_callback.get());
   }
 
+  scoped_ptr<IterationCallback> state_updating_callback;
+  if (options.update_state_every_iteration) {
+    state_updating_callback.reset(
+        new GradientProblemSolverStateUpdatingCallback(
+            problem.NumParameters(), solution.data(), parameters_ptr));
+    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
+                                       state_updating_callback.get());
+  }
+
   scoped_ptr<Minimizer> minimizer(Minimizer::Create(LINE_SEARCH));
-  Vector solution(problem.NumParameters());
-  VectorRef parameters(parameters_ptr, problem.NumParameters());
-  solution = parameters;
 
   Solver::Summary solver_summary;
   solver_summary.fixed_cost = 0.0;
diff --git a/internal/ceres/gradient_problem_solver_test.cc b/internal/ceres/gradient_problem_solver_test.cc
index d6332ae..20574de 100644
--- a/internal/ceres/gradient_problem_solver_test.cc
+++ b/internal/ceres/gradient_problem_solver_test.cc
@@ -72,5 +72,65 @@
   EXPECT_NEAR(1.0, parameters[1], expected_tolerance);
 }
 
+class QuadraticFunction : public ceres::FirstOrderFunction {
+  virtual ~QuadraticFunction() {}
+  virtual bool Evaluate(const double* parameters,
+                        double* cost,
+                        double* gradient) const {
+    const double x = parameters[0];
+    *cost = 0.5 * (5.0 - x) * (5.0 - x);
+    if (gradient != NULL) {
+      gradient[0] = x - 5.0;
+    }
+
+    return true;
+  }
+  virtual int NumParameters() const { return 1; }
+};
+
+struct RememberingCallback : public IterationCallback {
+  explicit RememberingCallback(double *x) : calls(0), x(x) {}
+  virtual ~RememberingCallback() {}
+  virtual CallbackReturnType operator()(const IterationSummary& summary) {
+    x_values.push_back(*x);
+    return SOLVER_CONTINUE;
+  }
+  int calls;
+  double *x;
+  std::vector<double> x_values;
+};
+
+
+TEST(Solver, UpdateStateEveryIterationOption) {
+  double x = 50.0;
+  const double original_x = x;
+
+  ceres::GradientProblem problem(new QuadraticFunction);
+  ceres::GradientProblemSolver::Options options;
+  RememberingCallback callback(&x);
+  options.callbacks.push_back(&callback);
+  ceres::GradientProblemSolver::Summary summary;
+
+  int num_iterations;
+
+  // First try: no updating.
+  ceres::Solve(options, problem, &x, &summary);
+  num_iterations = summary.iterations.size() - 1;
+  EXPECT_GT(num_iterations, 1);
+  for (int i = 0; i < callback.x_values.size(); ++i) {
+    EXPECT_EQ(50.0, callback.x_values[i]);
+  }
+
+  // Second try: with updating
+  x = 50.0;
+  options.update_state_every_iteration = true;
+  callback.x_values.clear();
+  ceres::Solve(options, problem, &x, &summary);
+  num_iterations = summary.iterations.size() - 1;
+  EXPECT_GT(num_iterations, 1);
+  EXPECT_EQ(original_x, callback.x_values[0]);
+  EXPECT_NE(original_x, callback.x_values[1]);
+}
+
 }  // namespace internal
 }  // namespace ceres