From d2f1894bacf584c49e62691769853f6adcff5084 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 14:26:11 -0400 Subject: [PATCH 01/10] implement errorTree for HybridNonlinearFactorGraph --- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 31 +++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactorGraph.h | 13 +++++++++ 2 files changed, 44 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 6d19ef156d..0e7e9c692d 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -179,4 +179,35 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( return linearFG; } +/* ************************************************************************* */ +AlgebraicDecisionTree HybridNonlinearFactorGraph::errorTree( + const Values& values) const { + AlgebraicDecisionTree result(0.0); + + // Iterate over each factor. + for (auto& factor : factors_) { + if (auto hnf = std::dynamic_pointer_cast(factor)) { + // Compute factor error and add it. 
+ result = result + hnf->errorTree(values); + + } else if (auto nf = std::dynamic_pointer_cast(factor)) { + // If continuous only, get the (double) error + // and add it to every leaf of the result + result = result + nf->error(values); + + } else if (auto df = std::dynamic_pointer_cast(factor)) { + // If discrete, just add its errorTree as well + result = result + df->errorTree(); + + } else { + throw std::runtime_error( + "HybridNonlinearFactorGraph::errorTree(Values) not implemented for " + "factor type " + + demangle(typeid(factor).name()) + "."); + } + } + + return result; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 5a09f18d45..53920a4aad 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -90,6 +90,19 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { /// Expose error(const HybridValues&) method. using Base::error; + /** + * @brief Compute error of (hybrid) nonlinear factors and discrete factors + * over each discrete assignment, and return as a tree. + * + * Error \f$ e = \Vert f(x) - \mu \Vert_{\Sigma} \f$. + * + * @note: Gaussian and hybrid Gaussian factors are not considered! + * + * @param values Manifold values at which to compute the error. 
+ * @return AlgebraicDecisionTree + */ + AlgebraicDecisionTree errorTree(const Values& values) const; + /// @} }; From 2463245ec21aa77301a790b879245c0f35a75887 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 14:38:51 -0400 Subject: [PATCH 02/10] add unit test --- .../tests/testHybridNonlinearFactorGraph.cpp | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 4735c16573..8221321ea5 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -211,6 +211,24 @@ TEST(HybridNonlinearFactorGraph, PushBack) { // EXPECT_LONGS_EQUAL(3, hnfg.size()); } +/* ****************************************************************************/ +// Test hybrid nonlinear factor graph errorTree +TEST(HybridNonlinearFactorGraph, ErrorTree) { + Switching s(3); + + HybridNonlinearFactorGraph graph = s.nonlinearFactorGraph; + + auto error_tree = graph.errorTree(s.linearizationPoint); + + auto dkeys = graph.discreteKeys(); + std::vector discrete_keys(dkeys.begin(), dkeys.end()); + std::vector leaves = {152.79175946923, 151.59861228867, + 151.70397280433, 151.60943791243}; + AlgebraicDecisionTree expected_error(discrete_keys, leaves); + // regression + EXPECT(assert_equal(expected_error, error_tree, 1e-7)); +} + /**************************************************************************** * Test construction of switching-like hybrid factor graph. 
*/ From dc5c4cb99dbaf7361e70b33cfa4417886a00d40f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 15:50:33 -0400 Subject: [PATCH 03/10] compute expected error --- .../tests/testHybridNonlinearFactorGraph.cpp | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 8221321ea5..647a8b6462 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -217,15 +217,28 @@ TEST(HybridNonlinearFactorGraph, ErrorTree) { Switching s(3); HybridNonlinearFactorGraph graph = s.nonlinearFactorGraph; + Values values = s.linearizationPoint; auto error_tree = graph.errorTree(s.linearizationPoint); auto dkeys = graph.discreteKeys(); - std::vector discrete_keys(dkeys.begin(), dkeys.end()); - std::vector leaves = {152.79175946923, 151.59861228867, - 151.70397280433, 151.60943791243}; + DiscreteKeys discrete_keys(dkeys.begin(), dkeys.end()); + + // Compute the sum of errors for each factor. + auto assignments = DiscreteValues::CartesianProduct(discrete_keys); + std::vector leaves(assignments.size()); + for (auto &&factor : graph) { + for (size_t i = 0; i < assignments.size(); ++i) { + leaves[i] += + factor->error(HybridValues(VectorValues(), assignments[i], values)); + } + } + // Swap i=1 and i=2 to give correct ordering. 
+ double temp = leaves[1]; + leaves[1] = leaves[2]; + leaves[2] = temp; AlgebraicDecisionTree expected_error(discrete_keys, leaves); - // regression + EXPECT(assert_equal(expected_error, error_tree, 1e-7)); } From 310c4ffae9af35278d31f992be79060097ea61fb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 6 Oct 2024 11:07:02 -0400 Subject: [PATCH 04/10] update pruning comment --- gtsam/hybrid/HybridBayesNet.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 36503d2eaf..f57cda28d9 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -73,7 +73,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const { // per pruned Discrete joint. for (auto &&conditional : *this) { if (auto hgc = conditional->asHybrid()) { - // Make a copy of the hybrid Gaussian conditional and prune it! + // Prune the hybrid Gaussian conditional! auto prunedHybridGaussianConditional = hgc->prune(pruned); // Type-erase and add to the pruned Bayes Net fragment. 
From 94fda5dd5a97969974d9ed381afa4713b2e4f7f5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 6 Oct 2024 11:08:02 -0400 Subject: [PATCH 05/10] take descriptive comment all the way --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 7dfa56e77d..5b26a623a6 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -305,6 +305,9 @@ static std::shared_ptr createDiscreteFactor( // Negative logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); + // = exp(-factor->error(kEmpty)) * \sqrt{|2πΣ|}; + // log = -(-factor->error(kEmpty) + log(\sqrt{|2πΣ|})) + // = factor->error(kEmpty) - log(\sqrt{|2πΣ|}) // negLogConstant gives `-log(k)` // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. return factor->error(kEmpty) - conditional->negLogConstant(); From 873b2a714204da27f8d7cc79a453b698d94f4891 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 6 Oct 2024 11:15:46 -0400 Subject: [PATCH 06/10] fix return value of pruned factors --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 5b26a623a6..d4fb5833b6 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -240,7 +240,9 @@ discreteElimination(const HybridGaussianFactorGraph &factors, // In this case, compute discrete probabilities. auto logProbability = [&](const GaussianFactor::shared_ptr &factor) -> double { - if (!factor) return 0.0; + // If the factor is null, it has been pruned, hence return ∞ + // so that exp(-∞)=0. 
+ if (!factor) return std::numeric_limits::infinity(); return factor->error(VectorValues()); }; AlgebraicDecisionTree logProbabilities = @@ -300,8 +302,9 @@ static std::shared_ptr createDiscreteFactor( auto negLogProbability = [&](const Result &pair) -> double { const auto &[conditional, factor] = pair; static const VectorValues kEmpty; - // If the factor is not null, it has no keys, just contains the residual. - if (!factor) return 1.0; // TODO(dellaert): not loving this. + // If the factor is null, it has been pruned, hence return ∞ + // so that the exp(-∞)=0. + if (!factor) return std::numeric_limits::infinity(); // Negative logspace version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); From 42052d07a106181239ef7eb30837a0c31573c175 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Oct 2024 11:13:52 -0400 Subject: [PATCH 07/10] additional fix --- gtsam/discrete/DecisionTreeFactor.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index d1b68f4bfb..0a453ad4a5 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -385,6 +385,16 @@ namespace gtsam { // Now threshold the decision tree size_t total = 0; auto thresholdFunc = [threshold, &total, N](const double& value) { + // There is a possible case where the `threshold` is equal to 0.0 + // In that case `(value < threshold) == false` + // which increases the leaf total erroneously. + // Hence we check for 0.0 explicitly. + if (fpEqual(value, 0.0, 1e-12)) { + return 0.0; + } + + // Check if value is less than the threshold and + // we haven't exceeded the maximum number of leaves. 
if (value < threshold || total >= N) { return 0.0; } else { From 0f403c7d28427e131e95dd2a89ae2e676b683521 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Oct 2024 11:54:21 -0400 Subject: [PATCH 08/10] clean up testHybridEstimation --- gtsam/hybrid/tests/testHybridEstimation.cpp | 104 +++++--------------- 1 file changed, 23 insertions(+), 81 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 88d8be0bc5..3bb659fee5 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -47,6 +47,14 @@ using namespace gtsam; using symbol_shorthand::X; using symbol_shorthand::Z; +namespace estimation_fixture { +std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, + 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; +// Ground truth discrete seq +std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, + 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; +} // namespace estimation_fixture + TEST(HybridEstimation, Full) { size_t K = 6; std::vector measurements = {0, 1, 2, 2, 2, 3}; @@ -90,12 +98,10 @@ TEST(HybridEstimation, Full) { /****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridEstimation, IncrementalSmoother) { + using namespace estimation_fixture; + size_t K = 15; - std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, - 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; - // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, - 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + // Switching example of robot moving in 1D // with given measurements and equal mode priors. 
Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); @@ -123,10 +129,6 @@ TEST(HybridEstimation, IncrementalSmoother) { smoother.update(linearized, maxNrLeaves, ordering); graph.resize(0); - - // Uncomment to print out pruned discrete marginal: - // smoother.hybridBayesNet().at(0)->asDiscrete()->dot("smoother_" + - // std::to_string(k)); } HybridValues delta = smoother.hybridBayesNet().optimize(); @@ -149,12 +151,10 @@ TEST(HybridEstimation, IncrementalSmoother) { /****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridEstimation, ISAM) { + using namespace estimation_fixture; + size_t K = 15; - std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, - 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; - // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, - 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + // Switching example of robot moving in 1D // with given measurements and equal mode priors. Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); @@ -179,7 +179,6 @@ TEST(HybridEstimation, ISAM) { initial.insert(X(k), switching.linearizationPoint.at(X(k))); isam.update(graph, initial, 3); - // isam.bayesTree().print("\n\n"); graph.resize(0); initial.clear(); @@ -201,65 +200,6 @@ TEST(HybridEstimation, ISAM) { EXPECT(assert_equal(expected_continuous, result)); } -/** - * @brief A function to get a specific 1D robot motion problem as a linearized - * factor graph. This is the problem P(X|Z, M), i.e. estimating the continuous - * positions given the measurements and discrete sequence. - * - * @param K The number of timesteps. - * @param measurements The vector of measurements for each timestep. - * @param discrete_seq The discrete sequence governing the motion of the robot. - * @param measurement_sigma Noise model sigma for measurements. - * @param between_sigma Noise model sigma for the between factor. 
- * @return GaussianFactorGraph::shared_ptr - */ -GaussianFactorGraph::shared_ptr specificModesFactorGraph( - size_t K, const std::vector& measurements, - const std::vector& discrete_seq, double measurement_sigma = 0.1, - double between_sigma = 1.0) { - NonlinearFactorGraph graph; - Values linearizationPoint; - - // Add measurement factors - auto measurement_noise = noiseModel::Isotropic::Sigma(1, measurement_sigma); - for (size_t k = 0; k < K; k++) { - graph.emplace_shared>(X(k), measurements.at(k), - measurement_noise); - linearizationPoint.insert(X(k), static_cast(k + 1)); - } - - using MotionModel = BetweenFactor; - - // Add "motion models". - auto motion_noise_model = noiseModel::Isotropic::Sigma(1, between_sigma); - for (size_t k = 0; k < K - 1; k++) { - auto motion_model = std::make_shared( - X(k), X(k + 1), discrete_seq.at(k), motion_noise_model); - graph.push_back(motion_model); - } - GaussianFactorGraph::shared_ptr linear_graph = - graph.linearize(linearizationPoint); - return linear_graph; -} - -/** - * @brief Get the discrete sequence from the integer `x`. - * - * @tparam K Template parameter so we can set the correct bitset size. - * @param x The integer to convert to a discrete binary sequence. - * @return std::vector - */ -template -std::vector getDiscreteSequence(size_t x) { - std::bitset seq = x; - std::vector discrete_seq(K - 1); - for (size_t i = 0; i < K - 1; i++) { - // Save to discrete vector in reverse order - discrete_seq[K - 2 - i] = seq[i]; - } - return discrete_seq; -} - /** * @brief Helper method to get the tree of * unnormalized probabilities as per the elimination scheme. @@ -270,7 +210,7 @@ std::vector getDiscreteSequence(size_t x) { * @param graph The HybridGaussianFactorGraph to eliminate. 
* @return AlgebraicDecisionTree */ -AlgebraicDecisionTree getProbPrimeTree( +AlgebraicDecisionTree GetProbPrimeTree( const HybridGaussianFactorGraph& graph) { Ordering continuous(graph.continuousKeySet()); const auto [bayesNet, remainingGraph] = @@ -316,8 +256,9 @@ AlgebraicDecisionTree getProbPrimeTree( * The values should match those of P'(Continuous) for each discrete mode. ********************************************************************************/ TEST(HybridEstimation, Probability) { + using namespace estimation_fixture; + constexpr size_t K = 4; - std::vector measurements = {0, 1, 2, 2}; double between_sigma = 1.0, measurement_sigma = 0.1; // Switching example of robot moving in 1D with @@ -358,8 +299,9 @@ TEST(HybridEstimation, Probability) { * for each discrete mode. */ TEST(HybridEstimation, ProbabilityMultifrontal) { + using namespace estimation_fixture; + constexpr size_t K = 4; - std::vector measurements = {0, 1, 2, 2}; double between_sigma = 1.0, measurement_sigma = 0.1; @@ -370,7 +312,7 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { auto graph = switching.linearizedFactorGraph; // Get the tree of unnormalized probabilities for each mode sequence. 
- AlgebraicDecisionTree expected_probPrimeTree = getProbPrimeTree(graph); + AlgebraicDecisionTree expected_probPrimeTree = GetProbPrimeTree(graph); // Eliminate continuous Ordering continuous_ordering(graph.continuousKeySet()); @@ -451,7 +393,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { /********************************************************************************* // Create a hybrid linear factor graph f(x0, x1, m0; z0, z1) ********************************************************************************/ -static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { +static HybridGaussianFactorGraph::shared_ptr CreateHybridGaussianFactorGraph() { HybridNonlinearFactorGraph nfg = createHybridNonlinearFactorGraph(); Values initial; @@ -466,7 +408,7 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { ********************************************************************************/ TEST(HybridEstimation, eliminateSequentialRegression) { // Create the factor graph from the nonlinear factor graph. - HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); + HybridGaussianFactorGraph::shared_ptr fg = CreateHybridGaussianFactorGraph(); // Create expected discrete conditional on m0. DiscreteKey m(M(0), 2); @@ -501,7 +443,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) { ********************************************************************************/ TEST(HybridEstimation, CorrectnessViaSampling) { // 1. Create the factor graph from the nonlinear factor graph. - const auto fg = createHybridGaussianFactorGraph(); + const auto fg = CreateHybridGaussianFactorGraph(); // 2. 
Eliminate into BN const HybridBayesNet::shared_ptr bn = fg->eliminateSequential(); From 5ee27bfad18226bcfc30ea1e87a03d548b685ce5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Oct 2024 12:05:58 -0400 Subject: [PATCH 09/10] more test improvements --- gtsam/hybrid/tests/testHybridEstimation.cpp | 42 ++++++++++++--------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 3bb659fee5..cefb599c5b 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -53,6 +53,22 @@ std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, // Ground truth discrete seq std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + +Switching InitializeEstimationProblem( + const size_t K, const double between_sigma, const double measurement_sigma, + const std::vector& measurements, + const std::string& discrete_transition_prob, + HybridNonlinearFactorGraph& graph, Values& initial) { + Switching switching(K, between_sigma, measurement_sigma, measurements, + discrete_transition_prob); + + // Add the X(0) prior + graph.push_back(switching.nonlinearFactorGraph.at(0)); + initial.insert(X(0), switching.linearizationPoint.at(X(0))); + + return switching; +} + } // namespace estimation_fixture TEST(HybridEstimation, Full) { @@ -104,14 +120,11 @@ TEST(HybridEstimation, IncrementalSmoother) { // Switching example of robot moving in 1D // with given measurements and equal mode priors. 
- Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); - HybridSmoother smoother; HybridNonlinearFactorGraph graph; Values initial; - - // Add the X(0) prior - graph.push_back(switching.nonlinearFactorGraph.at(0)); - initial.insert(X(0), switching.linearizationPoint.at(X(0))); + Switching switching = InitializeEstimationProblem(K, 1.0, 0.1, measurements, + "1/1 1/1", graph, initial); + HybridSmoother smoother; HybridGaussianFactorGraph linearized; @@ -157,16 +170,11 @@ TEST(HybridEstimation, ISAM) { // Switching example of robot moving in 1D // with given measurements and equal mode priors. - Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); - HybridNonlinearISAM isam; HybridNonlinearFactorGraph graph; Values initial; - - // gttic_(Estimation); - - // Add the X(0) prior - graph.push_back(switching.nonlinearFactorGraph.at(0)); - initial.insert(X(0), switching.linearizationPoint.at(X(0))); + Switching switching = InitializeEstimationProblem(K, 1.0, 0.1, measurements, + "1/1 1/1", graph, initial); + HybridNonlinearISAM isam; HybridGaussianFactorGraph linearized; @@ -367,7 +375,7 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { /********************************************************************************* // Create a hybrid nonlinear factor graph f(x0, x1, m0; z0, z1) ********************************************************************************/ -static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { +static HybridNonlinearFactorGraph CreateHybridNonlinearFactorGraph() { HybridNonlinearFactorGraph nfg; constexpr double sigma = 0.5; // measurement noise @@ -394,7 +402,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { // Create a hybrid linear factor graph f(x0, x1, m0; z0, z1) ********************************************************************************/ static HybridGaussianFactorGraph::shared_ptr CreateHybridGaussianFactorGraph() { - HybridNonlinearFactorGraph nfg = 
createHybridNonlinearFactorGraph(); + HybridNonlinearFactorGraph nfg = CreateHybridNonlinearFactorGraph(); Values initial; double z0 = 0.0, z1 = 1.0; @@ -406,7 +414,7 @@ static HybridGaussianFactorGraph::shared_ptr CreateHybridGaussianFactorGraph() { /********************************************************************************* * Do hybrid elimination and do regression test on discrete conditional. ********************************************************************************/ -TEST(HybridEstimation, eliminateSequentialRegression) { +TEST(HybridEstimation, EliminateSequentialRegression) { // Create the factor graph from the nonlinear factor graph. HybridGaussianFactorGraph::shared_ptr fg = CreateHybridGaussianFactorGraph(); From ec32d5f197f0857a20c90df4ee7828c7a2130796 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Oct 2024 12:06:14 -0400 Subject: [PATCH 10/10] add test for checking pruning --- gtsam/hybrid/tests/testHybridEstimation.cpp | 49 +++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index cefb599c5b..32d7277a2b 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -161,6 +161,55 @@ TEST(HybridEstimation, IncrementalSmoother) { EXPECT(assert_equal(expected_continuous, result)); } +/****************************************************************************/ +// Test if pruned factor is set to correct error and no errors are thrown. 
+TEST(HybridEstimation, ValidPruningError) { + using namespace estimation_fixture; + + size_t K = 8; + + HybridNonlinearFactorGraph graph; + Values initial; + Switching switching = InitializeEstimationProblem(K, 1e-2, 1e-3, measurements, + "1/1 1/1", graph, initial); + HybridSmoother smoother; + + HybridGaussianFactorGraph linearized; + + constexpr size_t maxNrLeaves = 3; + for (size_t k = 1; k < K; k++) { + // Motion Model + graph.push_back(switching.nonlinearFactorGraph.at(k)); + // Measurement + graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); + + initial.insert(X(k), switching.linearizationPoint.at(X(k))); + + linearized = *graph.linearize(initial); + Ordering ordering = smoother.getOrdering(linearized); + + smoother.update(linearized, maxNrLeaves, ordering); + + graph.resize(0); + } + + HybridValues delta = smoother.hybridBayesNet().optimize(); + + Values result = initial.retract(delta.continuous()); + + DiscreteValues expected_discrete; + for (size_t k = 0; k < K - 1; k++) { + expected_discrete[M(k)] = discrete_seq[k]; + } + EXPECT(assert_equal(expected_discrete, delta.discrete())); + + Values expected_continuous; + for (size_t k = 0; k < K; k++) { + expected_continuous.insert(X(k), measurements[k]); + } + EXPECT(assert_equal(expected_continuous, result)); +} + /****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridEstimation, ISAM) {