Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 62 additions & 3 deletions _pyceres/core/cost_functions.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,20 @@ class PyCostFunction : public ceres::CostFunction,
using ceres::CostFunction::CostFunction;
using ceres::CostFunction::set_num_residuals;

// Enable or disable numeric differentiation for this cost function.
// When enabled, Problem.add_residual_block wraps this cost function in a
// ceres::DynamicNumericDiffCostFunction, and any analytic jacobians filled
// in by Evaluate() are ignored (only residuals are used).
void set_use_numeric_diff(bool use_numeric_diff) {
  use_numeric_diff_ = use_numeric_diff;
}

// Whether numeric differentiation has been requested (defaults to false).
bool use_numeric_diff() const { return use_numeric_diff_; }

// Select the finite-difference scheme (e.g. FORWARD or CENTRAL) used when
// numeric differentiation is enabled. Has no effect unless
// set_use_numeric_diff(true) was called.
void set_numeric_diff_method(ceres::NumericDiffMethodType method) {
  numeric_diff_method_ = method;
}

// Finite-difference scheme used for numeric differentiation
// (CENTRAL by default).
ceres::NumericDiffMethodType numeric_diff_method() const {
  return numeric_diff_method_;
}

bool Evaluate(double const* const* parameters,
double* residuals,
double** jacobians) const override {
Expand All @@ -26,17 +40,31 @@ class PyCostFunction : public ceres::CostFunction,
// pointer values
if (!cached_flag) {
parameters_vec.reserve(this->parameter_block_sizes().size());
jacobians_vec.reserve(this->parameter_block_sizes().size());
residuals_wrap = py::array_t<double>(num_residuals(), residuals, no_copy);
for (size_t idx = 0; idx < parameter_block_sizes().size(); ++idx) {
parameters_vec.emplace_back(py::array_t<double>(
this->parameter_block_sizes()[idx], parameters[idx], no_copy));
}
if (jacobians) {
jacobians_vec.reserve(this->parameter_block_sizes().size());
for (size_t idx = 0; idx < parameter_block_sizes().size(); ++idx) {
jacobians_vec.emplace_back(py::array_t<double>(
this->parameter_block_sizes()[idx] * num_residuals(),
jacobians[idx],
no_copy));
}
cached_jacobians_flag = true;
}
cached_flag = true;
} else if (jacobians && !cached_jacobians_flag) {
jacobians_vec.reserve(this->parameter_block_sizes().size());
for (size_t idx = 0; idx < parameter_block_sizes().size(); ++idx) {
jacobians_vec.emplace_back(py::array_t<double>(
this->parameter_block_sizes()[idx] * num_residuals(),
jacobians[idx],
no_copy));
}
cached_flag = true;
cached_jacobians_flag = true;
}

// Check if the pointers have changed and if they have then change them
Expand All @@ -51,7 +79,7 @@ class PyCostFunction : public ceres::CostFunction,
this->parameter_block_sizes()[idx], parameters[idx], no_copy);
}
}
if (jacobians) {
if (jacobians && cached_jacobians_flag) {
info = jacobians_vec[0].request(true);
if (info.ptr != jacobians) {
for (size_t idx = 0; idx < jacobians_vec.size(); ++idx) {
Expand Down Expand Up @@ -85,10 +113,15 @@ class PyCostFunction : public ceres::CostFunction,
mutable std::vector<py::array_t<double>> jacobians_vec;
// Flag used to determine if the vectors need to be resized.
mutable bool cached_flag = false;
mutable bool cached_jacobians_flag = false;
// Buffer to contain the residuals pointer.
mutable py::array_t<double> residuals_wrap;
// Dummy variable for pybind11 to avoid a copy.
mutable py::str no_copy;

bool use_numeric_diff_ = false;
ceres::NumericDiffMethodType numeric_diff_method_ =
ceres::NumericDiffMethodType::CENTRAL;
};

void BindCostFunctions(py::module& m) {
Expand All @@ -104,6 +137,32 @@ void BindCostFunctions(py::module& m) {
&ceres::CostFunction::parameter_block_sizes,
py::return_value_policy::reference)
.def("set_num_residuals", &PyCostFunction::set_num_residuals)
.def(
"set_use_numeric_diff",
[](ceres::CostFunction& self, bool use_numeric_diff) {
auto* py_cost = dynamic_cast<PyCostFunction*>(&self);
THROW_CHECK(py_cost);
py_cost->set_use_numeric_diff(use_numeric_diff);
},
py::arg("use_numeric_diff") = true)
.def("use_numeric_diff", [](ceres::CostFunction& self) {
auto* py_cost = dynamic_cast<PyCostFunction*>(&self);
THROW_CHECK(py_cost);
return py_cost->use_numeric_diff();
})
.def(
"set_numeric_diff_method",
[](ceres::CostFunction& self, ceres::NumericDiffMethodType method) {
auto* py_cost = dynamic_cast<PyCostFunction*>(&self);
THROW_CHECK(py_cost);
py_cost->set_numeric_diff_method(method);
},
py::arg("method"))
.def("numeric_diff_method", [](ceres::CostFunction& self) {
auto* py_cost = dynamic_cast<PyCostFunction*>(&self);
THROW_CHECK(py_cost);
return py_cost->numeric_diff_method();
})
.def("set_parameter_block_sizes",
[](ceres::CostFunction& myself, std::vector<int32_t>& sizes) {
for (auto s : sizes) {
Expand Down
56 changes: 55 additions & 1 deletion _pyceres/core/problem.h
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#pragma once

#include "_pyceres/core/cost_functions.h"
#include "_pyceres/core/wrappers.h"
#include "_pyceres/helpers.h"
#include "_pyceres/logging.h"
Expand All @@ -23,6 +24,51 @@ void SetResidualBlocks(
}
}

// Adapter that exposes a PyCostFunction's residual computation through the
// functor interface expected by ceres::DynamicNumericDiffCostFunction.
// Holds a py::object handle on the Python cost object so it stays alive for
// as long as ceres owns this functor; the raw PyCostFunction* is cached once
// in the constructor to avoid re-casting on every evaluation.
// NOTE(review): operator() calls back into Python via Evaluate(); if the
// solver evaluates residual blocks from multiple threads this needs the GIL
// held — confirm the threading configuration used with these bindings.
class PyCostFunctionResidualFunctor {
 public:
  explicit PyCostFunctionResidualFunctor(py::object cost_obj)
      : cost_obj_(std::move(cost_obj)),
        cost_(cost_obj_.cast<PyCostFunction*>()) {}

  // Residual-only evaluation: jacobians are requested as nullptr, so the
  // wrapped Python Evaluate() fills residuals and skips derivative work.
  bool operator()(double const* const* parameters, double* residuals) const {
    return cost_->Evaluate(parameters, residuals, nullptr);
  }

 private:
  py::object cost_obj_;    // Owning handle; keeps the Python object alive.
  PyCostFunction* cost_;   // Borrowed; valid while cost_obj_ is held.
};

// Build a ceres cost function that numerically differentiates the Python
// cost object's residuals with the finite-difference scheme kMethod.
// The returned DynamicNumericDiffCostFunction takes ownership of the
// residual functor and mirrors the original cost's block sizes and
// residual count.
template <ceres::NumericDiffMethodType kMethod>
ceres::CostFunction* MakeNumericDiffCostFunction(PyCostFunction* cost,
                                                 const py::object& cost_obj) {
  auto* wrapped =
      new ceres::DynamicNumericDiffCostFunction<PyCostFunctionResidualFunctor,
                                                kMethod>(
          new PyCostFunctionResidualFunctor(cost_obj));
  for (const int block_size : cost->parameter_block_sizes()) {
    wrapped->AddParameterBlock(block_size);
  }
  wrapped->SetNumResiduals(cost->num_residuals());
  return wrapped;
}

// Dispatch on the cost function's configured finite-difference scheme and
// build the matching numeric-diff wrapper. The C++ enum also contains
// RIDDERS (not currently exposed by the Python NumericDiffMethodType
// binding), so handle it explicitly instead of silently remapping it;
// any future/unknown value still falls back to CENTRAL, the ceres default.
ceres::CostFunction* CreateNumericDiffCostFunction(PyCostFunction* cost,
                                                   const py::object& cost_obj) {
  switch (cost->numeric_diff_method()) {
    case ceres::NumericDiffMethodType::FORWARD:
      return MakeNumericDiffCostFunction<ceres::NumericDiffMethodType::FORWARD>(
          cost, cost_obj);
    case ceres::NumericDiffMethodType::RIDDERS:
      return MakeNumericDiffCostFunction<ceres::NumericDiffMethodType::RIDDERS>(
          cost, cost_obj);
    case ceres::NumericDiffMethodType::CENTRAL:
    default:
      return MakeNumericDiffCostFunction<ceres::NumericDiffMethodType::CENTRAL>(
          cost, cost_obj);
  }
}

} // namespace

// Function to create Problem::Options with DO_NOT_TAKE_OWNERSHIP
Expand Down Expand Up @@ -218,7 +264,15 @@ void BindProblem(py::module& m) {
}
THROW_CHECK_EQ(num_dims, cost->parameter_block_sizes()[i]);
}
ceres::CostFunction* costw = new CostFunctionWrapper(cost);
ceres::CostFunction* costw = nullptr;
auto* py_cost = dynamic_cast<PyCostFunction*>(cost);
if (py_cost && py_cost->use_numeric_diff()) {
py::object cost_obj =
py::cast(cost, py::return_value_policy::reference);
costw = CreateNumericDiffCostFunction(py_cost, cost_obj);
} else {
costw = new CostFunctionWrapper(cost);
}
return ResidualBlockIDWrapper(
self.AddResidualBlock(costw, loss.get(), pointer_values));
},
Expand Down
6 changes: 6 additions & 0 deletions _pyceres/core/types.h
Original file line number Diff line number Diff line change
Expand Up @@ -159,4 +159,10 @@ void BindTypes(py::module& m) {
.value("USER_SUCCESS", ceres::TerminationType::USER_SUCCESS)
.value("USER_FAILURE", ceres::TerminationType::USER_FAILURE);
AddStringToEnumConstructor(termt);

auto ndmt =
py::enum_<ceres::NumericDiffMethodType>(m, "NumericDiffMethodType")
.value("FORWARD", ceres::NumericDiffMethodType::FORWARD)
.value("CENTRAL", ceres::NumericDiffMethodType::CENTRAL);
AddStringToEnumConstructor(ndmt);
}
93 changes: 93 additions & 0 deletions examples/curve_fitting.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
import numpy as np
import pyceres
import matplotlib.pyplot as plt
import math


class ExpResidual(pyceres.CostFunction):
    """Residual of the exponential model y = exp(m * x + c) at one sample.

    Two parameter blocks, [m] and [c], each of size 1; one residual.
    """

    def __init__(self, x, y_obs):
        super().__init__()
        self.x = x
        self.y_obs = y_obs
        self.set_num_residuals(1)
        self.set_parameter_block_sizes([1, 1])

    def Evaluate(self, parameters, residuals, jacobians):
        """Fill the residual (and jacobians, when requested) for this sample."""
        slope = parameters[0][0]
        offset = parameters[1][0]

        # Model prediction at this sample.
        prediction = math.exp(slope * self.x + offset)

        # Residual: r = prediction - observation.
        residuals[0] = prediction - self.y_obs

        if jacobians is not None:
            # dr/dm = x * exp(m x + c)
            if jacobians[0] is not None:
                jacobians[0][0] = self.x * prediction
            # dr/dc = exp(m x + c)
            if jacobians[1] is not None:
                jacobians[1][0] = prediction

        return True



def main():
    """Fit y = exp(m * x + c) to noisy samples with analytic jacobians."""
    # Ground-truth parameters and noise level.
    m_true = 0.3
    c_true = 0.1
    sigma = 0.4

    # Synthesize noisy observations of the exponential curve.
    x = np.linspace(-5, 5, 400)
    y_clean = np.exp(m_true * x + c_true)
    y_noisy = y_clean + np.random.normal(0.0, sigma, size=x.shape)

    # Parameter blocks Ceres will optimize, starting from zero.
    m_est = np.array([0.0], dtype=np.float64)
    c_est = np.array([0.0], dtype=np.float64)

    problem = pyceres.Problem()

    # Add one residual block per observation.
    for sample_x, sample_y in zip(x, y_noisy):
        problem.add_residual_block(
            ExpResidual(sample_x, sample_y),
            None,
            [m_est, c_est],
        )

    # Configure and run the solver.
    options = pyceres.SolverOptions()
    options.linear_solver_type = pyceres.LinearSolverType.DENSE_NORMAL_CHOLESKY
    options.max_num_iterations = 10000
    options.minimizer_progress_to_stdout = False

    summary = pyceres.SolverSummary()
    pyceres.solve(options, problem, summary)

    print(summary.BriefReport())
    print("Estimated m, c:", m_est[0], c_est[0])

    # Visualize the fit against the data and the ground truth.
    y_fit = np.exp(m_est[0] * x + c_est[0])

    plt.figure()
    plt.scatter(x, y_noisy, s=10, alpha=0.5, label="Noisy data")
    plt.plot(x, y_fit, "r", linewidth=2, label="Fitted curve")
    plt.plot(x, y_clean, "k--", label="True curve")
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend()
    plt.grid(True)
    plt.show()


# Run the curve-fitting demo when executed as a script.
if __name__ == "__main__":
    main()
94 changes: 94 additions & 0 deletions examples/curve_fitting_autodiff.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
import numpy as np
import pyceres
import matplotlib.pyplot as plt
import math


class ExpResidual(pyceres.CostFunction):
    """Residual of the exponential model y = exp(m * x + c) at one sample.

    Two parameter blocks, [m] and [c], each of size 1; one residual.
    Jacobians are NOT provided analytically: numeric differentiation is
    enabled via set_use_numeric_diff(True), so the bindings wrap this cost
    in a numeric-diff cost function that estimates derivatives by finite
    differences and only ever calls Evaluate() for residuals.
    """

    def __init__(self, x, y_obs):
        super().__init__()
        self.x = x
        self.y_obs = y_obs
        self.set_num_residuals(1)
        self.set_parameter_block_sizes([1, 1])
        # Ask the bindings to differentiate this cost numerically.
        self.set_use_numeric_diff(True)

    def Evaluate(self, parameters, residuals, jacobians):
        """Compute the residual r = exp(m * x + c) - y_obs.

        `jacobians` is intentionally ignored: with numeric diff enabled the
        wrapper always requests residuals only.
        """
        m = parameters[0][0]
        c = parameters[1][0]

        # Model prediction
        y_pred = math.exp(m * self.x + c)

        # Residual: r = y_pred - y_obs
        residuals[0] = y_pred - self.y_obs

        return True



def main():
    """Fit y = exp(m * x + c) to noisy samples using numeric differentiation."""
    # Ground-truth parameters and noise level.
    m_true = 0.3
    c_true = 0.1
    sigma = 0.4

    # Synthesize noisy observations of the exponential curve.
    x = np.linspace(-5, 5, 400)
    y_clean = np.exp(m_true * x + c_true)
    y_noisy = y_clean + np.random.normal(0.0, sigma, size=x.shape)

    # Parameter blocks Ceres will optimize, starting from zero.
    m_est = np.array([0.0], dtype=np.float64)
    c_est = np.array([0.0], dtype=np.float64)

    problem = pyceres.Problem()

    # Add one residual block per observation.
    for sample_x, sample_y in zip(x, y_noisy):
        problem.add_residual_block(
            ExpResidual(sample_x, sample_y),
            None,
            [m_est, c_est],
        )

    # Configure and run the solver.
    options = pyceres.SolverOptions()
    options.linear_solver_type = pyceres.LinearSolverType.DENSE_NORMAL_CHOLESKY
    options.max_num_iterations = 10000
    options.minimizer_progress_to_stdout = False

    summary = pyceres.SolverSummary()
    pyceres.solve(options, problem, summary)

    print(summary.BriefReport())
    print("Estimated m, c:", m_est[0], c_est[0])

    # Visualize the fit against the data and the ground truth.
    y_fit = np.exp(m_est[0] * x + c_est[0])

    plt.figure()
    plt.scatter(x, y_noisy, s=10, alpha=0.5, label="Noisy data")
    plt.plot(x, y_fit, "r", linewidth=2, label="Fitted curve")
    plt.plot(x, y_clean, "k--", label="True curve")
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend()
    plt.grid(True)
    plt.show()


# Run the numeric-diff curve-fitting demo when executed as a script.
if __name__ == "__main__":
    main()
Loading