Add GradientProblem and GradientProblemSolver.

The line search minimizer in Ceres does not require that the
problem it is solving be a sum of squares. Over the past year
there have been multiple requests to expose this algorithm
directly, so that it can be used to solve general unconstrained
non-linear minimization problems.

With this change, a new optimization problem type called
GradientProblem is introduced. It is a thin wrapper around a
user-defined functor that evaluates the cost and its gradient
(FirstOrderFunction) and an optional LocalParameterization.

Correspondingly, a GradientProblemSolver and its associated
Options and Summary structs are introduced as well.
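
The intended usage pattern mirrors the existing Problem/Solver
API: implement a FirstOrderFunction, wrap it in a GradientProblem
and call Solve. The following is only a minimal sketch with an
illustrative one-dimensional cost (MyQuadratic is a made-up name);
the Rosenbrock example added in this change is the authoritative
usage:

  #include "ceres/ceres.h"

  // Illustrative cost: f(x) = (x - 3)^2, minimized at x = 3.
  class MyQuadratic : public ceres::FirstOrderFunction {
   public:
    virtual ~MyQuadratic() {}
    virtual bool Evaluate(const double* parameters,
                          double* cost,
                          double* gradient) const {
      const double x = parameters[0];
      cost[0] = (x - 3.0) * (x - 3.0);
      // The gradient pointer may be NULL when only the cost is needed.
      if (gradient != NULL) {
        gradient[0] = 2.0 * (x - 3.0);
      }
      return true;
    }
    virtual int NumParameters() const { return 1; }
  };

  int main() {
    double x[1] = {10.0};
    ceres::GradientProblem problem(new MyQuadratic());
    ceres::GradientProblemSolver::Options options;
    ceres::GradientProblemSolver::Summary summary;
    ceres::Solve(options, problem, x, &summary);
    return 0;
  }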

An example that uses the new API to find the minimum of Rosenbrock's
function is also added.

Change-Id: I42bf687540da25de991e9bdb00e321239244e8b4
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index dbbcb81..e26dc9c 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -47,6 +47,9 @@
 ADD_EXECUTABLE(curve_fitting curve_fitting.cc)
 TARGET_LINK_LIBRARIES(curve_fitting ceres)
 
+ADD_EXECUTABLE(rosenbrock rosenbrock.cc)
+TARGET_LINK_LIBRARIES(rosenbrock ceres)
+
 ADD_EXECUTABLE(curve_fitting_c curve_fitting.c)
 TARGET_LINK_LIBRARIES(curve_fitting_c ceres)
 # As this is a C file #including <math.h> we have to explicitly add the math
diff --git a/examples/rosenbrock.cc b/examples/rosenbrock.cc
new file mode 100644
index 0000000..da4ee63
--- /dev/null
+++ b/examples/rosenbrock.cc
@@ -0,0 +1,74 @@
+// Ceres Solver - A fast non-linear least squares minimizer
+// Copyright 2014 Google Inc. All rights reserved.
+// http://code.google.com/p/ceres-solver/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+//   this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+//   this list of conditions and the following disclaimer in the documentation
+//   and/or other materials provided with the distribution.
+// * Neither the name of Google Inc. nor the names of its contributors may be
+//   used to endorse or promote products derived from this software without
+//   specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: sameeragarwal@google.com (Sameer Agarwal)
+
+#include "ceres/ceres.h"
+#include "glog/logging.h"
+
+// f(x,y) = (1-x)^2 + 100(y - x^2)^2;
+class Rosenbrock : public ceres::FirstOrderFunction {
+ public:
+  virtual ~Rosenbrock() {}
+
+  virtual bool Evaluate(const double* parameters,
+                        double* cost,
+                        double* gradient) const {
+    const double x = parameters[0];
+    const double y = parameters[1];
+
+    cost[0] = (1.0 - x) * (1.0 - x) + 100.0 * (y - x * x) * (y - x * x);
+    if (gradient != NULL) {
+      gradient[0] = -2.0 * (1.0 - x) - 200.0 * (y - x * x) * 2.0 * x;
+      gradient[1] = 200.0 * (y - x * x);
+    }
+    return true;
+  }
+
+  virtual int NumParameters() const { return 2; }
+};
+
+
+int main(int argc, char** argv) {
+  google::InitGoogleLogging(argv[0]);
+
+  double parameters[2] = {-1.2, 1.0};
+
+  ceres::GradientProblemSolver::Options options;
+  options.minimizer_progress_to_stdout = true;
+
+  ceres::GradientProblemSolver::Summary summary;
+  ceres::GradientProblem problem(new Rosenbrock());
+  ceres::Solve(options, problem, parameters, &summary);
+
+  std::cout << summary.FullReport() << "\n";
+  std::cout << "Initial x: " << -1.2 << " y: " << 1.0 << "\n";
+  std::cout << "Final   x: " << parameters[0]
+            << " y: " << parameters[1] << "\n";
+  return 0;
+}