Untabify changes from Jim Roseborough

Change-Id: Ic640b34ba785669b415acfbeb2c931bea768f985
diff --git a/docs/source/analytical_derivatives.rst b/docs/source/analytical_derivatives.rst
index cfd5028..2a3a404 100644
--- a/docs/source/analytical_derivatives.rst
+++ b/docs/source/analytical_derivatives.rst
@@ -58,22 +58,22 @@
        virtual ~Rat43Analytic() {}
        virtual bool Evaluate(double const* const* parameters,
                              double* residuals,
-			     double** jacobians) const {
-	 const double b1 = parameters[0][0];
-	 const double b2 = parameters[0][1];
-	 const double b3 = parameters[0][2];
-	 const double b4 = parameters[0][3];
+                             double** jacobians) const {
+         const double b1 = parameters[0][0];
+         const double b2 = parameters[0][1];
+         const double b3 = parameters[0][2];
+         const double b4 = parameters[0][3];
 
-	 residuals[0] = b1 *  pow(1 + exp(b2 -  b3 * x_), -1.0 / b4) - y_;
+         residuals[0] = b1 *  pow(1 + exp(b2 -  b3 * x_), -1.0 / b4) - y_;
 
          if (!jacobians) return true;
-	 double* jacobian = jacobians[0];
-	 if (!jacobian) return true;
+         double* jacobian = jacobians[0];
+         if (!jacobian) return true;
 
          jacobian[0] = pow(1 + exp(b2 - b3 * x_), -1.0 / b4);
          jacobian[1] = -b1 * exp(b2 - b3 * x_) *
                        pow(1 + exp(b2 - b3 * x_), -1.0 / b4 - 1) / b4;
-	 jacobian[2] = x_ * b1 * exp(b2 - b3 * x_) *
+         jacobian[2] = x_ * b1 * exp(b2 - b3 * x_) *
                        pow(1 + exp(b2 - b3 * x_), -1.0 / b4 - 1) / b4;
          jacobian[3] = b1 * log(1 + exp(b2 - b3 * x_)) *
                        pow(1 + exp(b2 - b3 * x_), -1.0 / b4) / (b4 * b4);
@@ -97,27 +97,27 @@
        virtual ~Rat43AnalyticOptimized() {}
        virtual bool Evaluate(double const* const* parameters,
                              double* residuals,
-			     double** jacobians) const {
-	 const double b1 = parameters[0][0];
-	 const double b2 = parameters[0][1];
-	 const double b3 = parameters[0][2];
-	 const double b4 = parameters[0][3];
+                             double** jacobians) const {
+         const double b1 = parameters[0][0];
+         const double b2 = parameters[0][1];
+         const double b3 = parameters[0][2];
+         const double b4 = parameters[0][3];
 
-	 const double t1 = exp(b2 -  b3 * x_);
+         const double t1 = exp(b2 -  b3 * x_);
          const double t2 = 1 + t1;
-	 const double t3 = pow(t2, -1.0 / b4);
-	 residuals[0] = b1 * t3 - y_;
+         const double t3 = pow(t2, -1.0 / b4);
+         residuals[0] = b1 * t3 - y_;
 
          if (!jacobians) return true;
-	 double* jacobian = jacobians[0];
-	 if (!jacobian) return true;
+         double* jacobian = jacobians[0];
+         if (!jacobian) return true;
 
-	 const double t4 = pow(t2, -1.0 / b4 - 1);
-	 jacobian[0] = t3;
-	 jacobian[1] = -b1 * t1 * t4 / b4;
-	 jacobian[2] = -x_ * jacobian[1];
-	 jacobian[3] = b1 * log(t2) * t3 / (b4 * b4);
-	 return true;
+         const double t4 = pow(t2, -1.0 / b4 - 1);
+         jacobian[0] = t3;
+         jacobian[1] = -b1 * t1 * t4 / b4;
+         jacobian[2] = -x_ * jacobian[1];
+         jacobian[3] = b1 * log(t2) * t3 / (b4 * b4);
+         return true;
        }
 
      private:
@@ -182,11 +182,11 @@
 .. rubric:: Footnotes
 
 .. [#f1] The notion of best fit depends on the choice of the objective
-	 function used to measure the quality of fit, which in turn
-	 depends on the underlying noise process which generated the
-	 observations. Minimizing the sum of squared differences is
-	 the right thing to do when the noise is `Gaussian
-	 <https://en.wikipedia.org/wiki/Normal_distribution>`_. In
-	 that case the optimal value of the parameters is the `Maximum
-	 Likelihood Estimate
-	 <https://en.wikipedia.org/wiki/Maximum_likelihood_estimation>`_.
+         function used to measure the quality of fit, which in turn
+         depends on the underlying noise process which generated the
+         observations. Minimizing the sum of squared differences is
+         the right thing to do when the noise is `Gaussian
+         <https://en.wikipedia.org/wiki/Normal_distribution>`_. In
+         that case the optimal value of the parameters is the `Maximum
+         Likelihood Estimate
+         <https://en.wikipedia.org/wiki/Maximum_likelihood_estimation>`_.
diff --git a/docs/source/automatic_derivatives.rst b/docs/source/automatic_derivatives.rst
index 47a10af..1251814 100644
--- a/docs/source/automatic_derivatives.rst
+++ b/docs/source/automatic_derivatives.rst
@@ -39,7 +39,7 @@
 
   CostFunction* cost_function =
         new AutoDiffCostFunction<Rat43CostFunctor, 1, 4>(
-	  new Rat43CostFunctor(x, y));
+          new Rat43CostFunctor(x, y));
 
 Notice that compared to numeric differentiation, the only difference
 when defining the functor for use with automatic differentiation is
@@ -220,7 +220,7 @@
    template <int N>  Jet<N> pow(const Jet<N>& f, const Jet<N>& g) {
      return Jet<N>(pow(f.a, g.a),
                    g.a * pow(f.a, g.a - 1.0) * f.v +
-		   pow(f.a, g.a) * log(f.a); * g.v);
+                   pow(f.a, g.a) * log(f.a) * g.v);
    }