Reduce verbosity of the inner iteration minimizer.

Add Minimizer::Options::is_silent, which allows the user to turn
off logging inside the minimizer completely.

In particular, this is used to silence the logging when inner
iterations are used.
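
As a rough sketch of the intended call pattern (the construction of
Minimizer::Options from Solver::Options and the minimizer handle used
here are assumptions for illustration only), the inner iteration
driver simply flips the new flag before running the minimizer:

    Minimizer::Options minimizer_options(solver_options);
    minimizer_options.is_silent = true;  // suppress all minimizer logging
    minimizer->Minimize(minimizer_options, parameters, &summary);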

Add VLOG_IF to miniglog.
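
For reference, a minimal sketch of the new macro, following glog's
semantics where VLOG_IF(level, cond) logs at verbosity `level` only
when `cond` holds; the exact verbosity check miniglog performs (shown
here as a comparison against MAX_LOG_LEVEL) is an assumption:

    #define VLOG_IF(verbose_level, condition) \
        LOG_IF(INFO, (condition) && ((verbose_level) <= MAX_LOG_LEVEL))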

Change-Id: I4dc56e726eb012b4bbf750dc92adedba1a6d9c38
diff --git a/internal/ceres/line_search_minimizer.cc b/internal/ceres/line_search_minimizer.cc
index 5533e20..6ee514a 100644
--- a/internal/ceres/line_search_minimizer.cc
+++ b/internal/ceres/line_search_minimizer.cc
@@ -93,6 +93,7 @@
 void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
                                    double* parameters,
                                    Solver::Summary* summary) {
+  const bool is_not_silent = !options.is_silent;
   double start_time = WallTimeInSeconds();
   double iteration_start_time =  start_time;
 
@@ -124,7 +125,8 @@
 
   // Do initial cost and Jacobian evaluation.
   if (!Evaluate(evaluator, x, &current_state)) {
-    LOG(WARNING) << "Terminating: Cost and gradient evaluation failed.";
+    LOG_IF(WARNING, is_not_silent)
+        << "Terminating: Cost and gradient evaluation failed.";
     summary->termination_type = NUMERICAL_FAILURE;
     return;
   }
@@ -142,11 +144,12 @@
       options.gradient_tolerance * initial_gradient_max_norm;
 
   if (iteration_summary.gradient_max_norm <= absolute_gradient_tolerance) {
+    VLOG_IF(1, is_not_silent)
+        << "Terminating: Gradient tolerance reached."
+        << "Relative gradient max norm: "
+        << iteration_summary.gradient_max_norm / initial_gradient_max_norm
+        << " <= " << options.gradient_tolerance;
     summary->termination_type = GRADIENT_TOLERANCE;
-    VLOG(1) << "Terminating: Gradient tolerance reached."
-            << "Relative gradient max norm: "
-            << iteration_summary.gradient_max_norm / initial_gradient_max_norm
-            << " <= " << options.gradient_tolerance;
     return;
   }
 
@@ -193,8 +196,9 @@
                                      line_search_options,
                                      &summary->error));
   if (line_search.get() == NULL) {
-    LOG(ERROR) << "Ceres bug: Unable to create a LineSearch object, please "
-               << "contact the developers!, error: " << summary->error;
+    LOG_IF(ERROR, is_not_silent)
+        << "Ceres bug: Unable to create a LineSearch object, please "
+        << "contact the developers!, error: " << summary->error;
     summary->termination_type = DID_NOT_RUN;
     return;
   }
@@ -209,16 +213,17 @@
 
     iteration_start_time = WallTimeInSeconds();
     if (iteration_summary.iteration >= options.max_num_iterations) {
+      VLOG_IF(1, is_not_silent)
+          << "Terminating: Maximum number of iterations reached.";
       summary->termination_type = NO_CONVERGENCE;
-      VLOG(1) << "Terminating: Maximum number of iterations reached.";
       break;
     }
 
     const double total_solver_time = iteration_start_time - start_time +
         summary->preprocessor_time_in_seconds;
     if (total_solver_time >= options.max_solver_time_in_seconds) {
+      VLOG_IF(1, is_not_silent) << "Terminating: Maximum solver time reached.";
       summary->termination_type = NO_CONVERGENCE;
-      VLOG(1) << "Terminating: Maximum solver time reached.";
       break;
     }
 
@@ -247,10 +252,10 @@
           StringPrintf("Line search direction failure: specified "
                        "max_num_line_search_direction_restarts: %d reached.",
                        options.max_num_line_search_direction_restarts);
-      LOG(WARNING) << summary->error << " terminating optimization.";
+      LOG_IF(WARNING, is_not_silent) << summary->error
+                                     << " terminating optimization.";
       summary->termination_type = NUMERICAL_FAILURE;
       break;
-
     } else if (!line_search_status) {
       // Restart line search direction with gradient descent on first iteration
       // as we have not yet reached our maximum number of restarts.
@@ -258,13 +263,16 @@
                options.max_num_line_search_direction_restarts);
 
       ++num_line_search_direction_restarts;
-      LOG(WARNING)
+      LOG_IF(WARNING, is_not_silent)
           << "Line search direction algorithm: "
-          << LineSearchDirectionTypeToString(options.line_search_direction_type)
-          << ", failed to produce a valid new direction at iteration: "
-          << iteration_summary.iteration << ". Restarting, number of "
-          << "restarts: " << num_line_search_direction_restarts << " / "
-          << options.max_num_line_search_direction_restarts << " [max].";
+          << LineSearchDirectionTypeToString(
+              options.line_search_direction_type)
+          << ", failed to produce a valid new direction at "
+          << "iteration: " << iteration_summary.iteration
+          << ". Restarting, number of restarts: "
+          << num_line_search_direction_restarts << " / "
+          << options.max_num_line_search_direction_restarts
+          << " [max].";
       line_search_direction.reset(
           LineSearchDirection::Create(line_search_direction_options));
       current_state.search_direction = -current_state.gradient;
@@ -295,7 +303,7 @@
                        "(current_cost - previous_cost): %.5e",
                        initial_step_size, current_state.directional_derivative,
                        (current_state.cost - previous_state.cost));
-      LOG(WARNING) << summary->error;
+      LOG_IF(WARNING, is_not_silent) << summary->error;
       summary->termination_type = NUMERICAL_FAILURE;
       break;
     }
@@ -314,20 +322,23 @@
 
     // TODO(sameeragarwal): Collect stats.
     if (!evaluator->Plus(x.data(), delta.data(), x_plus_delta.data())) {
-      LOG(WARNING) << "x_plus_delta = Plus(x, delta) failed. ";
+      LOG_IF(WARNING, is_not_silent)
+          << "x_plus_delta = Plus(x, delta) failed. ";
     } else if (!Evaluate(evaluator, x_plus_delta, &current_state)) {
-      LOG(WARNING) << "Step failed to evaluate. ";
+      LOG_IF(WARNING, is_not_silent) << "Step failed to evaluate. ";
     } else {
       x = x_plus_delta;
     }
 
     iteration_summary.gradient_max_norm = current_state.gradient_max_norm;
     if (iteration_summary.gradient_max_norm <= absolute_gradient_tolerance) {
+      VLOG_IF(1, is_not_silent)
+          << "Terminating: Gradient tolerance reached."
+          << "Relative gradient max norm: "
+          << (iteration_summary.gradient_max_norm /
+              initial_gradient_max_norm)
+          << " <= " << options.gradient_tolerance;
       summary->termination_type = GRADIENT_TOLERANCE;
-      VLOG(1) << "Terminating: Gradient tolerance reached."
-              << "Relative gradient max norm: "
-              << iteration_summary.gradient_max_norm / initial_gradient_max_norm
-              << " <= " << options.gradient_tolerance;
       break;
     }
 
@@ -335,10 +346,11 @@
     const double absolute_function_tolerance =
         options.function_tolerance * previous_state.cost;
     if (fabs(iteration_summary.cost_change) < absolute_function_tolerance) {
-      VLOG(1) << "Terminating. Function tolerance reached. "
-              << "|cost_change|/cost: "
-              << fabs(iteration_summary.cost_change) / previous_state.cost
-              << " <= " << options.function_tolerance;
+      VLOG_IF(1, is_not_silent)
+          << "Terminating. Function tolerance reached. "
+          << "|cost_change|/cost: "
+          << fabs(iteration_summary.cost_change) / previous_state.cost
+          << " <= " << options.function_tolerance;
       summary->termination_type = FUNCTION_TOLERANCE;
       return;
     }