From 75d47246685807d1c5253645620ef1dc1e0eadd4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:05:47 -0400 Subject: [PATCH 01/28] remove extra imports in DiscreteBayesNet.cpp --- gtsam/discrete/DiscreteBayesNet.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/gtsam/discrete/DiscreteBayesNet.cpp b/gtsam/discrete/DiscreteBayesNet.cpp index c1aa188282..1c5c81e456 100644 --- a/gtsam/discrete/DiscreteBayesNet.cpp +++ b/gtsam/discrete/DiscreteBayesNet.cpp @@ -18,8 +18,6 @@ #include #include -#include -#include #include namespace gtsam { From dce56417bd68ccf7da7db614dbee535bc937cbef Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:07:03 -0400 Subject: [PATCH 02/28] minor edits --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index dfafb923b0..ede3c342d1 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -209,14 +209,14 @@ TEST(GaussianMixtureFactor, Error) { * or both for each hybrid factor component. * * @param values Initial values for linearization. - * @param means The mean values for the conditional components. + * @param mus The mean values for the conditional components. * @param sigmas Noise model sigma values (standard deviation). * @param m1 The discrete mode key. * @param z1 The measurement value. * @return HybridGaussianFactorGraph */ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &means, + const gtsam::Values &values, const std::vector &mus, const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { // Noise models auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); @@ -224,11 +224,9 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); // GaussianMixtureFactor component factors - auto f0 = - std::make_shared>(X(1), X(2), means[0], model0); - auto f1 = - std::make_shared>(X(1), X(2), means[1], model1); - std::vector factors{f0, f1}; + auto f0 = std::make_shared>(X(1), X(2), mus[0], model0); + auto f1 = std::make_shared>(X(1), X(2), mus[1], model1); + // std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; @@ -275,7 +273,7 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the * means being different. 
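 * Because both components use the same sigma here, the two normalization
 * constants are equal and the discrete posterior is decided by the means alone.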
*/ -TEST(GaussianMixtureFactor, DifferentMeansHBN) { +TEST(GaussianMixtureFactor, DifferentMeans) { DiscreteKey m1(M(1), 2); Values values; From 9e77eba9167b0226a09c17f59b1ecdcdd00c38d5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:09:40 -0400 Subject: [PATCH 03/28] rename X1 to X0 and X2 to X1 --- .../tests/testGaussianMixtureFactor.cpp | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index ede3c342d1..7926cdc5a2 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -224,27 +224,27 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); // GaussianMixtureFactor component factors - auto f0 = std::make_shared>(X(1), X(2), mus[0], model0); - auto f1 = std::make_shared>(X(1), X(2), mus[1], model1); + auto f0 = std::make_shared>(X(0), X(1), mus[0], model0); + auto f1 = std::make_shared>(X(0), X(1), mus[1], model1); // std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; - double x1 = values.at(X(1)), x2 = values.at(X(2)); + double x1 = values.at(X(0)), x2 = values.at(X(1)); Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; + {X(0), H0_1 /*Sp1*/}, + {X(1), H0_2 /*Tp2*/}}; Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; + {X(0), H1_1 /*Sp1*/}, + {X(1), H1_2 /*Tp2*/}}; // Create conditional P(Z1 | X1, X2, M1) auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, + {Z(1)}, {X(0), X(1)}, {m1}, {std::make_shared(terms0, 1, -d0, model0), std::make_shared(terms1, 1, -d1, model1)}); gtsam::HybridBayesNet bn; @@ -257,7 +257,7 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + auto prior = PriorFactor(X(0), x1, prior_noise).linearize(values); mixture_fg.push_back(prior); return mixture_fg; @@ -278,8 +278,8 @@ TEST(GaussianMixtureFactor, DifferentMeans) { Values values; double x1 = 0.0, x2 = 1.75; - values.insert(X(1), x1); - values.insert(X(2), x2); + values.insert(X(0), x1); + values.insert(X(1), x2); // Different means, same sigma std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; @@ -293,7 +293,7 @@ TEST(GaussianMixtureFactor, DifferentMeans) { HybridValues actual = bn->optimize(); HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, DiscreteValues{{M(1), 0}}); EXPECT(assert_equal(expected, actual)); @@ -317,7 +317,7 @@ TEST(GaussianMixtureFactor, DifferentMeans) { // If we add a measurement on X2, we have more information to work with. 
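    // Below, a measurement z2 = 2.0 (the second component's mean) pins down
    // X2, flipping the MAP mode to m1 = 1.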
// Add a measurement on X2 auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, + GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(1), I_1x1, prior_noise); auto prior_x2 = meas_z2.likelihood(Vector1(x2)); @@ -327,7 +327,7 @@ TEST(GaussianMixtureFactor, DifferentMeans) { HybridValues actual = bn->optimize(); HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, DiscreteValues{{M(1), 1}}); EXPECT(assert_equal(expected, actual)); @@ -359,8 +359,8 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { Values values; double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); + values.insert(X(0), x1); + values.insert(X(1), x2); std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; HybridGaussianFactorGraph mixture_fg = @@ -369,8 +369,8 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { auto hbn = mixture_fg.eliminateSequential(); VectorValues cv; + cv.insert(X(0), Vector1(0.0)); cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); // Check that the error values at the MLE point μ. AlgebraicDecisionTree errorTree = hbn->errorTree(cv); From 3fc1019220596428dd18befe525286e16d70b9cf Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:21 -0400 Subject: [PATCH 04/28] provide logNormalizers directly to the augment method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 66 ++++++++++---------------- gtsam/hybrid/GaussianMixtureFactor.h | 20 ++++---- 2 files changed, 37 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 7fb16f0d10..0427eef7b3 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -35,45 +35,17 @@ namespace gtsam { * the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactor shared pointers. - * @param varyingNormalizers Flag indicating the normalizers are different for - * each component. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. * @return GaussianMixtureFactor::Factors */ GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { - if (!varyingNormalizers) { - return factors; - } - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - // Since noise models are Gaussian, we can get the logDeterminant using the - // same trick as in GaussianConditional - double logDetR = - model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(factors, computeNormalizers); - + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. 
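  // Subtracting the minimum shifts every component's error by the same
  // constant, so the relative mode probabilities are unchanged.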
- double min_log_normalizer = log_normalizers.min(); - log_normalizers = log_normalizers.apply( + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( [&min_log_normalizer](double n) { return n - min_log_normalizer; }); // Finally, update the [A|b] matrices. @@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If there is no noise model, there is nothing to do. - if (!jf->get_model()) return gf; // If the log_normalizer is 0, do nothing if (log_normalizers(assignment) == 0.0) return gf; @@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment( } /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers) +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) : Base(continuousKeys, discreteKeys), - factors_(augment(factors, varyingNormalizers)) {} + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } + /* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 588501bbe1..6e9f6034ea 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors, - bool varyingNormalizers = false); + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. 
+ * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const std::vector &factors, - bool varyingNormalizers = false) + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), - varyingNormalizers) {} + Factors(discreteKeys, factors), logNormalizers) {} /// @} /// @name Testable @@ -178,4 +179,7 @@ template <> struct traits : public Testable { }; +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model); + } // namespace gtsam From cfef6d3d278ac29d006faa285baa3892623c3e90 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:40 -0400 Subject: [PATCH 05/28] update GaussianMixture::likelihood to compute the logNormalizers --- gtsam/hybrid/GaussianMixture.cpp | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1440f7e9e8..6c92f02526 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -202,8 +202,25 @@ std::shared_ptr GaussianMixture::likelihood( const auto likelihood_m = conditional->likelihood(given); return likelihood_m; }); + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + return ComputeLogNormalizer(model); + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods, true); + continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); } /* ************************************************************************* */ From 30bf261c15d8190c9eea09057bf11269d38628c7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:11:00 -0400 Subject: [PATCH 06/28] Tests which verify direct factor specification works well --- .../tests/testGaussianMixtureFactor.cpp | 145 ++++++++++++++++++ 1 file changed, 145 insertions(+) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 7926cdc5a2..10541cf886 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -388,6 +388,151 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { EXPECT(assert_equal(expected_m1, actual_m1)); } +HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, + const std::vector &mus, + const std::vector &sigmas, + DiscreteKey &m1) { + auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); + auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = std::make_shared>(X(0), X(1), mus[0], model0) + ->linearize(values); + auto f1 = std::make_shared>(X(0), X(1), mus[1], model1) + ->linearize(values); + + // Create GaussianMixtureFactor + std::vector factors{f0, f1}; + AlgebraicDecisionTree logNormalizers( + {m1}, std::vector{ComputeLogNormalizer(model0), + ComputeLogNormalizer(model1)}); + GaussianMixtureFactor 
mixtureFactor({X(0), X(1)}, {m1}, factors, + logNormalizers); + + HybridGaussianFactorGraph hfg; + hfg.push_back(mixtureFactor); + + hfg.push_back(PriorFactor(X(0), values.at(X(0)), prior_noise) + .linearize(values)); + + return hfg; +} + +TEST(GaussianMixtureFactor, DifferentMeansFG) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 0.0, x2 = 1.75; + values.insert(X(0), x1); + values.insert(X(1), x2); + + std::vector mus = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; + + HybridGaussianFactorGraph hfg = CreateFactorGraph(values, mus, sigmas, m1); + + { + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, + DiscreteValues{{M(1), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + } + + { + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + hfg.push_back( + PriorFactor(X(1), mus[1], prior_noise).linearize(values)); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, + DiscreteValues{{M(1), 1}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); + } + } +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances. + * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(GaussianMixtureFactor, DifferentCovariancesFG) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(0), x1); + values.insert(X(1), x2); + + std::vector mus = {0.0, 0.0}, sigmas = {1e2, 1e-2}; + + // Create FG with GaussianMixtureFactor and prior on X1 + HybridGaussianFactorGraph mixture_fg = + CreateFactorGraph(values, mus, sigmas, m1); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(0), Vector1(0.0)); + cv.insert(X(1), Vector1(0.0)); + + // Check that the error values at the MLE point μ. 
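+  // Both components have zero mean, so cv minimizes the residual for either
+  // mode; the gap between the two errors below comes purely from the
+  // differing normalization constants.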
+ AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + hbn->errorTree(cv).print(); + hbn2->errorTree(cv).print(); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + /* ************************************************************************* */ int main() { TestResult tr; From bfaff504df6cba7d5426d1296b37d5e5f419d499 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:12:23 -0400 Subject: [PATCH 07/28] remove extra prints --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 10541cf886..b08fb79b75 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -517,8 +517,6 @@ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { // Check that the error values at the MLE point μ. AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - hbn->errorTree(cv).print(); - hbn2->errorTree(cv).print(); DiscreteValues dv0{{M(1), 0}}; DiscreteValues dv1{{M(1), 1}}; From 665d75544844e7d9514c294397260e4c236977f4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 11:57:57 -0400 Subject: [PATCH 08/28] add docstring and GTSAM_EXPORT for ComputeLogNormalizer --- gtsam/hybrid/GaussianMixtureFactor.h | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6e9f6034ea..6680abb54a 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -179,7 +179,16 @@ template <> struct traits : public Testable { }; -double ComputeLogNormalizer( +/** + * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * for a Gaussian noise model. + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalizer we wish to compute. 
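+ * Note: the value returned is log |2πΣ| = n log(2π) + log |Σ|.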
+ * @return double + */ +GTSAM_EXPORT double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr &noise_model); } // namespace gtsam From 07a00885139c91121b93c2e9a4d5cfed4e016e2f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 20:32:14 -0400 Subject: [PATCH 09/28] compute logNormalizers and pass to GaussianMixtureFactor --- gtsam/hybrid/GaussianMixture.cpp | 35 +++++++------ gtsam/hybrid/GaussianMixtureFactor.cpp | 68 ++++++++++++++++++++++++-- gtsam/hybrid/GaussianMixtureFactor.h | 14 ++++-- 3 files changed, 94 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0a0332af86..6c92f02526 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,24 +200,27 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = - logConstant_ - conditional->logNormalizationConstant(); - if (Cgm_Kgcm == 0.0) { - return likelihood_m; - } else { - // Add a constant factor to the likelihood in case the noise models - // are not all equal. - GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return std::make_shared(gfg); - } + return likelihood_m; }); + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + return ComputeLogNormalizer(model); + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); + continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8471cef33f..0427eef7b3 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,11 +28,55 @@ namespace gtsam { +/** + * @brief Helper function to augment the [A|b] matrices in the factor components + * with the normalizer values. + * This is done by storing the normalizer value in + * the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactor shared pointers. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. + * @return GaussianMixtureFactor::Factors + */ +GaussianMixtureFactor::Factors augment( + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { + // Find the minimum value so we can "proselytize" to positive values. + // Done because we can't have sqrt of negative numbers. + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( + [&min_log_normalizer](double n) { return n - min_log_normalizer; }); + + // Finally, update the [A|b] matrices. 
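+  // Each shifted value c is appended as an extra RHS row sqrt(c); since a
+  // JacobianFactor's error is 0.5 * ||Ax - b||^2, this adds 0.5 * c per mode.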
+ auto update = [&log_normalizers]( + const Assignment &assignment, + const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + if (!jf) return gf; + // If the log_normalizer is 0, do nothing + if (log_normalizers(assignment) == 0.0) return gf; + + GaussianFactorGraph gfg; + gfg.push_back(jf); + + Vector c(1); + c << std::sqrt(log_normalizers(assignment)); + auto constantFactor = std::make_shared(c); + + gfg.push_back(constantFactor); + return std::dynamic_pointer_cast( + std::make_shared(gfg)); + }; + return factors.apply(update); +} + /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) + : Base(continuousKeys, discreteKeys), + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -120,4 +164,20 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } +/* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} + } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 4570268f14..6680abb54a 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,10 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors); + const Factors &factors, + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -94,12 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
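+ * Only the differences between the log-normalizers matter, since augment()
+ * subtracts the minimum value before storing them.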
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) + const std::vector &factors, + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors), logNormalizers) {} /// @} /// @name Testable From 13193a1dcdea2aa5e1c6b64bc47c91a78d6d7138 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 10:02:12 -0400 Subject: [PATCH 10/28] better comments --- .../hybrid/tests/testGaussianMixtureFactor.cpp | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 310f3b1e45..b4a643569c 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -586,21 +586,21 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { /** * @brief Helper function to specify a Hybrid Bayes Net - * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph - * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. + * P(X1)P(Z1 | X1, X2, M1) and convert it to a Hybrid Factor Graph + * ϕ(X1)ϕ(X1, X2, M1; Z1) by converting to likelihoods given Z1. * * We can specify either different means or different sigmas, * or both for each hybrid factor component. * * @param values Initial values for linearization. - * @param mus The mean values for the conditional components. + * @param means The mean values for the conditional components. * @param sigmas Noise model sigma values (standard deviation). * @param m1 The discrete mode key. * @param z1 The measurement value. * @return HybridGaussianFactorGraph */ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &mus, + const gtsam::Values &values, const std::vector &means, const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { // Noise models auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); @@ -608,9 +608,11 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); // GaussianMixtureFactor component factors - auto f0 = std::make_shared>(X(0), X(1), mus[0], model0); - auto f1 = std::make_shared>(X(0), X(1), mus[1], model1); - // std::vector factors{f0, f1}; + auto f0 = + std::make_shared>(X(0), X(1), means[0], model0); + auto f1 = + std::make_shared>(X(0), X(1), means[1], model1); + std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; @@ -651,7 +653,7 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( /** * @brief Test components with differing means. * - * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), + * We specify a hybrid Bayes network P(Z | X, M) =P(X1)P(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. * This is a different case since now we have a hybrid factor * with 2 continuous variables ϕ(x1, x2, m1). 
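A minimal usage sketch of the API introduced so far in this series, assuming
linearized per-mode factors f0, f1 with noise models model0, model1, a
DiscreteKey m1, and the gtsam namespace plus the X symbol shorthand, set up
exactly as in CreateFactorGraph in the tests above:

  std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
  AlgebraicDecisionTree<Key> logNormalizers(
      {m1}, std::vector<double>{ComputeLogNormalizer(model0),
                                ComputeLogNormalizer(model1)});
  // augment() shifts these values by their minimum and folds them into each
  // component's [A|b] block, so differing sigmas weigh on the discrete mode.
  GaussianMixtureFactor mixtureFactor({X(0), X(1)}, {m1}, factors,
                                      logNormalizers);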
From 51a2fd5334500a09fa744c1535278a13675cf4da Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 5 Sep 2024 20:03:55 -0400 Subject: [PATCH 11/28] improved comments --- .../tests/testGaussianMixtureFactor.cpp | 64 +++++++++++++------ 1 file changed, 43 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index b4a643569c..fc832d704d 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -388,10 +388,10 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { namespace test_two_state_estimation { /// Create Two State Bayes Network with measurements +/// The Bayes network is P(z0|x0)P(x1|x0,m1)p(m1) and optionally p(z1|x1) static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, double sigma1, bool add_second_measurement = false, - double prior_sigma = 1e-3, double measurement_sigma = 3.0) { DiscreteKey m1(M(1), 2); Key z0 = Z(0), z1 = Z(1); @@ -436,7 +436,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, /** * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * - * P(f01|x1,x0,m1) has different means and same covariance. + * P(x1|x0,m1) has different means and same covariance. * * Converting to a factor graph gives us * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) @@ -490,7 +490,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { /** * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * - * P(f01|x1,x0,m1) has different means and different covariances. + * P(x1|x0,m1) has different means and different covariances. * * Converting to a factor graph gives us * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) @@ -612,7 +612,6 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( std::make_shared>(X(0), X(1), means[0], model0); auto f1 = std::make_shared>(X(0), X(1), means[1], model1); - std::vector factors{f0, f1}; /// Get terms for each p^m(z1 | x1, x2) Matrix H0_1, H0_2, H1_1, H1_2; @@ -635,11 +634,10 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( std::make_shared(terms1, 1, -d1, model1)}); gtsam::HybridBayesNet bn; bn.emplace_back(gm); - // bn.print(); // Create FG via toFactorGraph gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 = 0 + measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); // Linearized prior factor on X1 @@ -653,11 +651,11 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( /** * @brief Test components with differing means. * - * We specify a hybrid Bayes network P(Z | X, M) =P(X1)P(Z1 | X1, X2, M1), + * We specify a hybrid Bayes network P(Z | X, M) = P(X1)P(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. * This is a different case since now we have a hybrid factor * with 2 continuous variables ϕ(x1, x2, m1). - * p(Z1 | X1, X2, M1) has 2 factors each for the binary + * P(Z1 | X1, X2, M1) has 2 factors each for the binary * mode m1, with only the means being different. 
*/ TEST(GaussianMixtureFactor, DifferentMeans) { @@ -686,18 +684,16 @@ TEST(GaussianMixtureFactor, DifferentMeans) { EXPECT(assert_equal(expected, actual)); { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); + + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); } } { @@ -713,10 +709,10 @@ TEST(GaussianMixtureFactor, DifferentMeans) { auto bn = hfg.eliminateSequential(); HybridValues actual = bn->optimize(); + // regression HybridValues expected( VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, DiscreteValues{{M(1), 1}}); - EXPECT(assert_equal(expected, actual)); { @@ -777,6 +773,19 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { EXPECT(assert_equal(expected_m1, actual_m1)); } +namespace test_direct_factor_graph { +/** + * @brief Create a Factor Graph by directly specifying all + * the factors instead of creating conditionals first. + * This way we can directly provide the likelihoods and + * then perform linearization. + * + * @param values Initial values to linearize around. + * @param mus The means of the GaussianMixtureFactor components. + * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param m1 The discrete key. + * @return HybridGaussianFactorGraph + */ HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, const std::vector &mus, const std::vector &sigmas, @@ -806,8 +815,19 @@ HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, return hfg; } +} // namespace test_direct_factor_graph +/* ************************************************************************* */ +/** + * @brief Test components with differing means but the same covariances. + * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ TEST(GaussianMixtureFactor, DifferentMeansFG) { + using namespace test_direct_factor_graph; + DiscreteKey m1(M(1), 2); Values values; @@ -878,13 +898,15 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { /* ************************************************************************* */ /** - * @brief Test components with differing covariances. + * @brief Test components with differing covariances but the same means. 
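 * Both means are zero, so the modes differ only in their normalization
 * constants.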
* The factor graph is * *-X1-*-X2 * | * M1 */ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { + using namespace test_direct_factor_graph; + DiscreteKey m1(M(1), 2); Values values; From 615c04ae4132b511498fa990ec404a3c87c98a4d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 09:22:25 -0400 Subject: [PATCH 12/28] some more refactor and remove redundant test --- .../tests/testGaussianMixtureFactor.cpp | 156 ++++++------------ 1 file changed, 55 insertions(+), 101 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index fc832d704d..21d3fbf83c 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -599,7 +599,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) { * @param z1 The measurement value. * @return HybridGaussianFactorGraph */ -HybridGaussianFactorGraph GetFactorGraphFromBayesNet( +static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( const gtsam::Values &values, const std::vector &means, const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { // Noise models @@ -649,16 +649,19 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( /* ************************************************************************* */ /** - * @brief Test components with differing means. + * @brief Test Hybrid Factor Graph. * * We specify a hybrid Bayes network P(Z | X, M) = P(X1)P(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. - * This is a different case since now we have a hybrid factor - * with 2 continuous variables ϕ(x1, x2, m1). - * P(Z1 | X1, X2, M1) has 2 factors each for the binary - * mode m1, with only the means being different. + * This is different from the TwoStateModel version since + * we use a factor with 2 continuous variables ϕ(x1, x2, m1) + * directly instead of a conditional. + * This serves as a good sanity check. + * + * P(Z1 | X1, X2, M1) has 2 conditionals each for the binary + * mode m1. */ -TEST(GaussianMixtureFactor, DifferentMeans) { +TEST(GaussianMixtureFactor, FactorGraphFromBayesNet) { DiscreteKey m1(M(1), 2); Values values; @@ -683,18 +686,16 @@ TEST(GaussianMixtureFactor, DifferentMeans) { EXPECT(assert_equal(expected, actual)); - { - DiscreteValues dv0{{M(1), 0}}; - VectorValues cont0 = bn->optimize(dv0); - double error0 = bn->error(HybridValues(cont0, dv0)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); - DiscreteValues dv1{{M(1), 1}}; - VectorValues cont1 = bn->optimize(dv1); - double error1 = bn->error(HybridValues(cont1, dv1)); - EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); - } + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); } { // If we add a measurement on X2, we have more information to work with. 
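    // Without any information on X2 the two modes tie exactly (verified
    // above); the extra measurement below breaks that symmetry.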
@@ -715,62 +716,18 @@ TEST(GaussianMixtureFactor, DifferentMeans) { DiscreteValues{{M(1), 1}}); EXPECT(assert_equal(expected, actual)); - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); - } - } -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. - * Same as the DifferentMeans example but in this case, - * we keep the means the same and vary the covariances. - */ -TEST(GaussianMixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(0), x1); - values.insert(X(1), x2); - - std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; - HybridGaussianFactorGraph mixture_fg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(0), Vector1(0.0)); - cv.insert(X(1), Vector1(0.0)); - - // Check that the error values at the MLE point μ. - AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, bn->error(HybridValues(cont0, dv0)), + 1e-9); - EXPECT(assert_equal(expected_m1, actual_m1)); + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, bn->error(HybridValues(cont1, dv1)), + 1e-9); + } } namespace test_direct_factor_graph { @@ -781,23 +738,24 @@ namespace test_direct_factor_graph { * then perform linearization. * * @param values Initial values to linearize around. - * @param mus The means of the GaussianMixtureFactor components. + * @param means The means of the GaussianMixtureFactor components. * @param sigmas The covariances of the GaussianMixtureFactor components. * @param m1 The discrete key. 
* @return HybridGaussianFactorGraph */ -HybridGaussianFactorGraph CreateFactorGraph(const gtsam::Values &values, - const std::vector &mus, - const std::vector &sigmas, - DiscreteKey &m1) { +static HybridGaussianFactorGraph CreateFactorGraph( + const gtsam::Values &values, const std::vector &means, + const std::vector &sigmas, DiscreteKey &m1) { auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - auto f0 = std::make_shared>(X(0), X(1), mus[0], model0) - ->linearize(values); - auto f1 = std::make_shared>(X(0), X(1), mus[1], model1) - ->linearize(values); + auto f0 = + std::make_shared>(X(0), X(1), means[0], model0) + ->linearize(values); + auto f1 = + std::make_shared>(X(0), X(1), means[1], model1) + ->linearize(values); // Create GaussianMixtureFactor std::vector factors{f0, f1}; @@ -835,9 +793,9 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { values.insert(X(0), x1); values.insert(X(1), x2); - std::vector mus = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; + std::vector means = {0.0, 2.0}, sigmas = {1e-0, 1e-0}; - HybridGaussianFactorGraph hfg = CreateFactorGraph(values, mus, sigmas, m1); + HybridGaussianFactorGraph hfg = CreateFactorGraph(values, means, sigmas, m1); { auto bn = hfg.eliminateSequential(); @@ -849,26 +807,22 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { EXPECT(assert_equal(expected, actual)); - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } + DiscreteValues dv0{{M(1), 0}}; + VectorValues cont0 = bn->optimize(dv0); + double error0 = bn->error(HybridValues(cont0, dv0)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + + DiscreteValues dv1{{M(1), 1}}; + VectorValues cont1 = bn->optimize(dv1); + double error1 = bn->error(HybridValues(cont1, dv1)); + EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); } { auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); hfg.push_back( - PriorFactor(X(1), mus[1], prior_noise).linearize(values)); + PriorFactor(X(1), means[1], prior_noise).linearize(values)); auto bn = hfg.eliminateSequential(); HybridValues actual = bn->optimize(); @@ -914,11 +868,11 @@ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { values.insert(X(0), x1); values.insert(X(1), x2); - std::vector mus = {0.0, 0.0}, sigmas = {1e2, 1e-2}; + std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; // Create FG with GaussianMixtureFactor and prior on X1 HybridGaussianFactorGraph mixture_fg = - CreateFactorGraph(values, mus, sigmas, m1); + CreateFactorGraph(values, means, sigmas, m1); auto hbn = mixture_fg.eliminateSequential(); From 9dc29e008bb9755836a4140be4656dd650691f77 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 11:20:38 -0400 Subject: [PATCH 13/28] fix test --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 21d3fbf83c..7452376c9f 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -811,7 +811,7 @@ TEST(GaussianMixtureFactor, 
DifferentMeansFG) { VectorValues cont0 = bn->optimize(dv0); double error0 = bn->error(HybridValues(cont0, dv0)); // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); DiscreteValues dv1{{M(1), 1}}; VectorValues cont1 = bn->optimize(dv1); From 24ec30ea4fe3e6640055bcb7a9424d966d01d591 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Sep 2024 17:32:37 -0400 Subject: [PATCH 14/28] replace emplace_back with emplace_shared --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index c771edf859..7f6c12979c 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -629,12 +629,12 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( {X(0), H1_1 /*Sp1*/}, {X(1), H1_2 /*Tp2*/}}; // Create conditional P(Z1 | X1, X2, M1) - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(0), X(1)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); + auto conditionals = std::vector{ + std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}; gtsam::HybridBayesNet bn; - bn.emplace_back(gm); + bn.emplace_shared(KeyVector{Z(1)}, KeyVector{X(0), X(1)}, + DiscreteKeys{m1}, conditionals); // Create FG via toFactorGraph gtsam::VectorValues measurements; From 336b4947ad6c3dbf59ad91366b7275cb60949ecc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 15:36:22 -0400 Subject: [PATCH 15/28] fixes --- gtsam/hybrid/HybridGaussianConditional.cpp | 20 +++++++++++++++++++- gtsam/hybrid/HybridGaussianFactor.cpp | 16 ---------------- gtsam/hybrid/HybridGaussianFactor.h | 12 ------------ 3 files changed, 19 insertions(+), 29 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index bb27d39714..efa771c12a 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -224,7 +224,24 @@ std::shared_ptr HybridGaussianConditional::likelihood( [&](const GaussianConditional::shared_ptr &conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); - s.insert(discreteKeys.begin(), discreteKeys.end()); + const double Cgm_Kgcm = + logConstant_ - conditional->logNormalizationConstant(); + if (Cgm_Kgcm == 0.0) { + return {likelihood_m, 0.0}; + } else { + // Add a constant to the likelihood in case the noise models + // are not all equal. + double c = 2.0 * Cgm_Kgcm; + return {likelihood_m, c}; + } + }); + return std::make_shared( + continuousParentKeys, discreteParentKeys, likelihoods); +} + +/* ************************************************************************* */ +std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { + std::set s(discreteKeys.begin(), discreteKeys.end()); return s; } @@ -237,6 +254,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( * const Assignment &, const GaussianConditional::shared_ptr &)> */ std::function &, const GaussianConditional::shared_ptr &)> HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { // Get the discrete keys as sets for the decision tree // and the gaussian mixture. 
diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index f38dd6b841..bfebb064e1 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -163,20 +163,4 @@ double HybridGaussianFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } -/* *******************************************************************************/ -double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using - // the same trick as in GaussianConditional - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index f52022348b..333c5d25bb 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -175,16 +175,4 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> struct traits : public Testable {}; -/** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. - * @return double - */ -GTSAM_EXPORT double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model); - } // namespace gtsam From de68aecca5e894c3ddedc8f3e691cfc44c70cc1e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 15 Sep 2024 15:45:14 -0400 Subject: [PATCH 16/28] fix tests --- .../hybrid/tests/testHybridGaussianFactor.cpp | 52 +++++++++---------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 44e60d4513..f2248510ed 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -221,8 +221,9 @@ double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, return 1 / (1 + e); }; -static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, - double sigma0, double sigma1) { +static HybridBayesNet GetHybridGaussianConditionalModel(double mu0, double mu1, + double sigma0, + double sigma1) { DiscreteKey m(M(0), 2); Key z = Z(0); @@ -254,7 +255,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, * The resulting factor graph should eliminate to a Bayes net * which represents a sigmoid function. */ -TEST(HybridGaussianFactor, GaussianMixtureModel) { +TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -263,7 +264,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); + auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma, sigma); // The result should be a sigmoid. // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 @@ -326,7 +327,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel) { * which represents a Gaussian-like function * where m1>m0 close to 3.1333. 
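 * (cf. prob_m_z above, which evaluates the posterior P(m=1|z) in closed form)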
*/ -TEST(HybridGaussianFactor, GaussianMixtureModel2) { +TEST(HybridGaussianFactor, HybridGaussianConditionalModel2) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -335,7 +336,7 @@ TEST(HybridGaussianFactor, GaussianMixtureModel2) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); + auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma0, sigma1); double m1_high = 3.133, lambda = 4; { @@ -393,7 +394,7 @@ namespace test_two_state_estimation { DiscreteKey m1(M(1), 2); -void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { +void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, -I_1x1, measurement_model); @@ -414,7 +415,7 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( /// Create two state Bayes network with 1 or two measurement models HybridBayesNet CreateBayesNet( - const HybridGaussianConditional::shared_ptr& hybridMotionModel, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, bool add_second_measurement = false) { HybridBayesNet hbn; @@ -437,9 +438,9 @@ HybridBayesNet CreateBayesNet( /// Approximate the discrete marginal P(m1) using importance sampling std::pair approximateDiscreteMarginal( - const HybridBayesNet& hbn, - const HybridGaussianConditional::shared_ptr& hybridMotionModel, - const VectorValues& given, size_t N = 100000) { + const HybridBayesNet &hbn, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + const VectorValues &given, size_t N = 100000) { /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), /// using q(x0) = N(z0, sigmaQ) to sample x0. HybridBayesNet q; @@ -758,7 +759,7 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - // GaussianMixtureFactor component factors + // HybridGaussianFactor component factors auto f0 = std::make_shared>(X(0), X(1), means[0], model0); auto f1 = @@ -783,8 +784,8 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( std::make_shared(terms0, 1, -d0, model0), std::make_shared(terms1, 1, -d1, model1)}; gtsam::HybridBayesNet bn; - bn.emplace_shared(KeyVector{Z(1)}, KeyVector{X(0), X(1)}, - DiscreteKeys{m1}, conditionals); + bn.emplace_shared( + KeyVector{Z(1)}, KeyVector{X(0), X(1)}, DiscreteKeys{m1}, conditionals); // Create FG via toFactorGraph gtsam::VectorValues measurements; @@ -812,7 +813,7 @@ static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( * P(Z1 | X1, X2, M1) has 2 conditionals each for the binary * mode m1. */ -TEST(GaussianMixtureFactor, FactorGraphFromBayesNet) { +TEST(HybridGaussianFactor, FactorGraphFromBayesNet) { DiscreteKey m1(M(1), 2); Values values; @@ -889,8 +890,8 @@ namespace test_direct_factor_graph { * then perform linearization. * * @param values Initial values to linearize around. - * @param means The means of the GaussianMixtureFactor components. - * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param means The means of the HybridGaussianFactor components. + * @param sigmas The covariances of the HybridGaussianFactor components. * @param m1 The discrete key. 
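 * Each component factor is paired with its scalar log-normalizer (computed
 * via ComputeLogNormalizer), so differing sigmas are reflected in the
 * discrete posterior.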
* @return HybridGaussianFactorGraph */ @@ -908,13 +909,10 @@ static HybridGaussianFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1) ->linearize(values); - // Create GaussianMixtureFactor - std::vector factors{f0, f1}; - AlgebraicDecisionTree logNormalizers( - {m1}, std::vector{ComputeLogNormalizer(model0), - ComputeLogNormalizer(model1)}); - GaussianMixtureFactor mixtureFactor({X(0), X(1)}, {m1}, factors, - logNormalizers); + // Create HybridGaussianFactor + std::vector factors{ + {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + HybridGaussianFactor mixtureFactor({X(0), X(1)}, {m1}, factors); HybridGaussianFactorGraph hfg; hfg.push_back(mixtureFactor); @@ -934,7 +932,7 @@ static HybridGaussianFactorGraph CreateFactorGraph( * | * M1 */ -TEST(GaussianMixtureFactor, DifferentMeansFG) { +TEST(HybridGaussianFactor, DifferentMeansFG) { using namespace test_direct_factor_graph; DiscreteKey m1(M(1), 2); @@ -1009,7 +1007,7 @@ TEST(GaussianMixtureFactor, DifferentMeansFG) { * | * M1 */ -TEST(GaussianMixtureFactor, DifferentCovariancesFG) { +TEST(HybridGaussianFactor, DifferentCovariancesFG) { using namespace test_direct_factor_graph; DiscreteKey m1(M(1), 2); @@ -1021,7 +1019,7 @@ TEST(GaussianMixtureFactor, DifferentCovariancesFG) { std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; - // Create FG with GaussianMixtureFactor and prior on X1 + // Create FG with HybridGaussianFactor and prior on X1 HybridGaussianFactorGraph mixture_fg = CreateFactorGraph(values, means, sigmas, m1); From 987ecd4a07a40cf88cebd9f820c3764701275d70 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 18 Sep 2024 18:19:24 -0400 Subject: [PATCH 17/28] undo accidental rename --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index c89e5d260e..dff1208554 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -221,9 +221,8 @@ double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, return 1 / (1 + e); }; -static HybridBayesNet GetHybridGaussianConditionalModel(double mu0, double mu1, - double sigma0, - double sigma1) { +static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, + double sigma0, double sigma1) { DiscreteKey m(M(0), 2); Key z = Z(0); @@ -258,7 +257,7 @@ static HybridBayesNet GetHybridGaussianConditionalModel(double mu0, double mu1, * The resulting factor graph should eliminate to a Bayes net * which represents a sigmoid function. */ -TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { +TEST(HybridGaussianFactor, GaussianMixtureModel) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -267,7 +266,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma, sigma); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); // The result should be a sigmoid. // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 @@ -330,7 +329,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditionalModel) { * which represents a Gaussian-like function * where m1>m0 close to 3.1333. 
*/ -TEST(HybridGaussianFactor, HybridGaussianConditionalModel2) { +TEST(HybridGaussianFactor, GaussianMixtureModel2) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -339,7 +338,7 @@ TEST(HybridGaussianFactor, HybridGaussianConditionalModel2) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto hbn = GetHybridGaussianConditionalModel(mu0, mu1, sigma0, sigma1); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); double m1_high = 3.133, lambda = 4; { From 80d9a5a65fc4fdf0ef45921a971597c73ef3c14f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 10:27:43 -0400 Subject: [PATCH 18/28] remove duplicate test and focus only on direct specification --- .../hybrid/tests/testHybridGaussianFactor.cpp | 161 +----------------- 1 file changed, 7 insertions(+), 154 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index dff1208554..8bf5080229 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -741,152 +741,6 @@ TEST(HybridGaussianFactor, TwoStateModel4) { EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); } -/** - * @brief Helper function to specify a Hybrid Bayes Net - * P(X1)P(Z1 | X1, X2, M1) and convert it to a Hybrid Factor Graph - * ϕ(X1)ϕ(X1, X2, M1; Z1) by converting to likelihoods given Z1. - * - * We can specify either different means or different sigmas, - * or both for each hybrid factor component. - * - * @param values Initial values for linearization. - * @param means The mean values for the conditional components. - * @param sigmas Noise model sigma values (standard deviation). - * @param m1 The discrete mode key. - * @param z1 The measurement value. - * @return HybridGaussianFactorGraph - */ -static HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &means, - const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { - // Noise models - auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); - auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - // HybridGaussianFactor component factors - auto f0 = - std::make_shared>(X(0), X(1), means[0], model0); - auto f1 = - std::make_shared>(X(0), X(1), means[1], model1); - - /// Get terms for each p^m(z1 | x1, x2) - Matrix H0_1, H0_2, H1_1, H1_2; - double x1 = values.at(X(0)), x2 = values.at(X(1)); - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(0), H0_1 /*Sp1*/}, - {X(1), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(0), H1_1 /*Sp1*/}, - {X(1), H1_2 /*Tp2*/}}; - // Create conditional P(Z1 | X1, X2, M1) - auto conditionals = std::vector{ - std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}; - gtsam::HybridBayesNet bn; - bn.emplace_shared( - KeyVector{Z(1)}, KeyVector{X(0), X(1)}, DiscreteKeys{m1}, conditionals); - - // Create FG via toFactorGraph - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(0), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - return mixture_fg; -} - -/* 
************************************************************************* */ -/** - * @brief Test Hybrid Factor Graph. - * - * We specify a hybrid Bayes network P(Z | X, M) = P(X1)P(Z1 | X1, X2, M1), - * which is then converted to a factor graph by specifying Z1. - * This is different from the TwoStateModel version since - * we use a factor with 2 continuous variables ϕ(x1, x2, m1) - * directly instead of a conditional. - * This serves as a good sanity check. - * - * P(Z1 | X1, X2, M1) has 2 conditionals each for the binary - * mode m1. - */ -TEST(HybridGaussianFactor, FactorGraphFromBayesNet) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 0.0, x2 = 1.75; - values.insert(X(0), x1); - values.insert(X(1), x2); - - // Different means, same sigma - std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - - HybridGaussianFactorGraph hfg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); - - { - // With no measurement on X2, each mode should be equally likely - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}}, - DiscreteValues{{M(1), 0}}); - - EXPECT(assert_equal(expected, actual)); - - DiscreteValues dv0{{M(1), 0}}; - VectorValues cont0 = bn->optimize(dv0); - double error0 = bn->error(HybridValues(cont0, dv0)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9); - - DiscreteValues dv1{{M(1), 1}}; - VectorValues cont1 = bn->optimize(dv1); - double error1 = bn->error(HybridValues(cont1, dv1)); - EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9); - } - { - // If we add a measurement on X2, we have more information to work with. - // Add a measurement on X2 - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(1), I_1x1, - prior_noise); - auto prior_x2 = meas_z2.likelihood(Vector1(x2)); - - hfg.push_back(prior_x2); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - // regression - HybridValues expected( - VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}}, - DiscreteValues{{M(1), 1}}); - EXPECT(assert_equal(expected, actual)); - - DiscreteValues dv0{{M(1), 0}}; - VectorValues cont0 = bn->optimize(dv0); - // regression - EXPECT_DOUBLES_EQUAL(2.12692448787, bn->error(HybridValues(cont0, dv0)), - 1e-9); - - DiscreteValues dv1{{M(1), 1}}; - VectorValues cont1 = bn->optimize(dv1); - // regression - EXPECT_DOUBLES_EQUAL(0.126928487854, bn->error(HybridValues(cont1, dv1)), - 1e-9); - } -} - namespace test_direct_factor_graph { /** * @brief Create a Factor Graph by directly specifying all @@ -902,10 +756,11 @@ namespace test_direct_factor_graph { */ static HybridGaussianFactorGraph CreateFactorGraph( const gtsam::Values &values, const std::vector &means, - const std::vector &sigmas, DiscreteKey &m1) { + const std::vector &sigmas, DiscreteKey &m1, + double measurement_noise = 1e-3) { auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise); auto f0 = std::make_shared>(X(0), X(1), means[0], model0) @@ -917,10 +772,10 @@ static HybridGaussianFactorGraph CreateFactorGraph( // Create HybridGaussianFactor std::vector factors{ {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; - HybridGaussianFactor mixtureFactor({X(0), X(1)}, {m1}, factors); + 
HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; - hfg.push_back(mixtureFactor); + hfg.push_back(motionFactor); hfg.push_back(PriorFactor(X(0), values.at(X(0)), prior_noise) .linearize(values)); @@ -1025,10 +880,8 @@ TEST(HybridGaussianFactor, DifferentCovariancesFG) { std::vector means = {0.0, 0.0}, sigmas = {1e2, 1e-2}; // Create FG with HybridGaussianFactor and prior on X1 - HybridGaussianFactorGraph mixture_fg = - CreateFactorGraph(values, means, sigmas, m1); - - auto hbn = mixture_fg.eliminateSequential(); + HybridGaussianFactorGraph fg = CreateFactorGraph(values, means, sigmas, m1); + auto hbn = fg.eliminateSequential(); VectorValues cv; cv.insert(X(0), Vector1(0.0)); From 717eb7eadc8141ac0c41e9afc564b75693160c87 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 10:28:16 -0400 Subject: [PATCH 19/28] relinearization test --- .../tests/testHybridNonlinearFactorGraph.cpp | 60 +++++++++++++++++-- 1 file changed, 54 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 3631ac44ed..2616224638 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -857,10 +857,10 @@ namespace test_relinearization { */ static HybridNonlinearFactorGraph CreateFactorGraph( const std::vector &means, const std::vector &sigmas, - DiscreteKey &m1, double x0_measurement) { + DiscreteKey &m1, double x0_measurement, double measurement_noise = 1e-3) { auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise); auto f0 = std::make_shared>(X(0), X(1), means[0], model0); @@ -871,7 +871,7 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::vector factors{ {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; - HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, factors); + HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); HybridNonlinearFactorGraph hfg; hfg.push_back(mixtureFactor); @@ -968,7 +968,7 @@ TEST(HybridNonlinearFactorGraph, DifferentMeans) { * | * M1 */ -TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { +TEST(HybridNonlinearFactorGraph, DifferentCovariances) { using namespace test_relinearization; DiscreteKey m1(M(1), 2); @@ -982,8 +982,10 @@ TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { // Create FG with HybridNonlinearFactor and prior on X1 HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0); - // Linearize and eliminate - auto hbn = hfg.linearize(values)->eliminateSequential(); + // Linearize + auto hgfg = hfg.linearize(values); + // and eliminate + auto hbn = hgfg->eliminateSequential(); VectorValues cv; cv.insert(X(0), Vector1(0.0)); @@ -1005,6 +1007,52 @@ TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) { EXPECT(assert_equal(expected_m1, actual_m1)); } +TEST(HybridNonlinearFactorGraph, Relinearization) { + using namespace test_relinearization; + + DiscreteKey m1(M(1), 2); + + Values values; + double x0 = 0.0, x1 = 0.8; + values.insert(X(0), x0); + values.insert(X(1), x1); + + std::vector means = {0.0, 1.0}, sigmas = {1e-2, 1e-2}; + + double prior_sigma = 1e-2; + // Create FG with HybridNonlinearFactor and prior on X1 + HybridNonlinearFactorGraph hfg = + 
CreateFactorGraph(means, sigmas, m1, 0.0, prior_sigma); + hfg.push_back(PriorFactor( + X(1), 1.2, noiseModel::Isotropic::Sigma(1, prior_sigma))); + + // Linearize + auto hgfg = hfg.linearize(values); + // and eliminate + auto hbn = hgfg->eliminateSequential(); + + HybridValues delta = hbn->optimize(); + values = values.retract(delta.continuous()); + + Values expected_first_result; + expected_first_result.insert(X(0), 0.0666666666667); + expected_first_result.insert(X(1), 1.13333333333); + EXPECT(assert_equal(expected_first_result, values)); + + // Re-linearize + hgfg = hfg.linearize(values); + // and eliminate + hbn = hgfg->eliminateSequential(); + delta = hbn->optimize(); + HybridValues result(delta.continuous(), delta.discrete(), + values.retract(delta.continuous())); + + HybridValues expected_result( + VectorValues{{X(0), Vector1(0)}, {X(1), Vector1(0)}}, + DiscreteValues{{M(1), 1}}, expected_first_result); + EXPECT(assert_equal(expected_result, result)); +} + /* ************************************************************************* */ int main() { From f875b86357448c71cc9e1fd7be18c2ef0710b4bb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 10:30:35 -0400 Subject: [PATCH 20/28] print nonlinear part of HybridValues --- gtsam/hybrid/HybridValues.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridValues.cpp b/gtsam/hybrid/HybridValues.cpp index 1fb2a2adbb..533cd6eab4 100644 --- a/gtsam/hybrid/HybridValues.cpp +++ b/gtsam/hybrid/HybridValues.cpp @@ -36,9 +36,12 @@ HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv, void HybridValues::print(const std::string& s, const KeyFormatter& keyFormatter) const { std::cout << s << ": \n"; - continuous_.print(" Continuous", - keyFormatter); // print continuous components - discrete_.print(" Discrete", keyFormatter); // print discrete components + // print continuous components + continuous_.print(" Continuous", keyFormatter); + // print discrete components + discrete_.print(" Discrete", keyFormatter); + // print nonlinear components + nonlinear_.print(" Nonlinear", keyFormatter); } /* ************************************************************************* */ From 9b6facd26246051b4d9396b81c503e10350695a8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 15:33:24 -0400 Subject: [PATCH 21/28] add documentation for additive scalar in the error and remove the 0.5 since it gets cancelled out during normalization --- gtsam/hybrid/HybridGaussianFactor.h | 9 +++++++++ gtsam/hybrid/HybridNonlinearFactor.h | 15 +++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 8c73b7952c..d724bdff3c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -45,6 +45,15 @@ using GaussianFactorValuePair = std::pair; * where the set of discrete variables indexes to * the continuous gaussian distribution. * + * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., + * the negative log-likelihood for a Gaussian noise model. + * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on + * the discrete assignment. + * For example, adding a 70/30 mode probability is supported by providing the + * scalars $-log(.7)$ and $-log(.3)$. + * Note that adding a common constant will not make any difference in the + * optimization, so $-log(70)$ and $-log(30)$ work just as well. 
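 *
 * [Editor's illustrative sketch, not part of the original patch; f0, f1, m1
 * and the keys are hypothetical.] Using the constructor exercised in the
 * tests of this series, a 70/30 mode prior could be attached as:
 *
 *   std::vector<GaussianFactorValuePair> pairs{
 *       {f0, -std::log(0.7)},   // scalar for mode 0
 *       {f1, -std::log(0.3)}};  // scalar for mode 1
 *   HybridGaussianFactor factor({X(0), X(1)}, m1, pairs);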
+ * * @ingroup hybrid */ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 3c0e2ab91f..a0c7af92be 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -45,6 +45,17 @@ using NonlinearFactorValuePair = std::pair; * This class stores all factors as HybridFactors which can then be typecast to * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform * the correct operation. + * + * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., + * the negative log-likelihood for a Gaussian noise model. + * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on + * the discrete assignment. + * For example, adding a 70/30 mode probability is supported by providing the + * scalars $-log(.7)$ and $-log(.3)$. + * Note that adding a common constant will not make any difference in the + * optimization, so $-log(70)$ and $-log(30)$ work just as well. + * + * @ingroup hybrid */ class HybridNonlinearFactor : public HybridFactor { public: @@ -134,7 +145,7 @@ class HybridNonlinearFactor : public HybridFactor { auto errorFunc = [continuousValues](const std::pair& f) { auto [factor, val] = f; - return factor->error(continuousValues) + (0.5 * val); + return factor->error(continuousValues) + val; }; DecisionTree result(factors_, errorFunc); return result; @@ -153,7 +164,7 @@ class HybridNonlinearFactor : public HybridFactor { auto [factor, val] = factors_(discreteValues); // Compute the error for the selected factor const double factorError = factor->error(continuousValues); - return factorError + (0.5 * val); + return factorError + val; } /** From 244661afb1c1318c094cd9da37f8ef74ace9e3ac Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 16:09:54 -0400 Subject: [PATCH 22/28] rename ComputeLogNormalizer to ComputeLogNormalizerConstant --- gtsam/hybrid/HybridGaussianFactor.cpp | 6 +++--- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 3 ++- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 3 ++- gtsam/linear/NoiseModel.cpp | 4 ++-- gtsam/linear/NoiseModel.h | 4 ++-- gtsam/linear/tests/testNoiseModel.cpp | 6 +++--- 6 files changed, 14 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index cabe6defcc..13b26db6c3 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -30,8 +30,8 @@ namespace gtsam { /** * @brief Helper function to augment the [A|b] matrices in the factor components - * with the normalizer values. - * This is done by storing the normalizer value in + * with the additional scalar values. + * This is done by storing the value in * the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactors and arbitrary scalars. 
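// [Editor's note, not part of the original patch:] storing the scalar in an
// extra row of b works because a whitened JacobianFactor has error
// 0.5*|Ax - b|^2, so a constant row with residual c adds exactly 0.5*c^2 to
// the error; choosing c = sqrt(2*value) therefore adds `value`. A minimal
// sketch of the idea, assuming a JacobianFactor jf and a scalar extra >= 0:
//
//   GaussianFactorGraph gfg;
//   gfg.push_back(jf);
//   Vector c(1);
//   c << std::sqrt(2.0 * extra);  // this row contributes 0.5*c^2 == extra
//   gfg.push_back(std::make_shared<JacobianFactor>(c));
//   auto augmented = std::make_shared<JacobianFactor>(gfg);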
@@ -56,7 +56,7 @@ HybridGaussianFactor::Factors augment( const HybridGaussianFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If the log_normalizer is 0, do nothing + // If the value is 0, do nothing if (values(assignment) == 0.0) return gf; GaussianFactorGraph gfg; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 246b8d4ce7..63b37e7d47 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -771,7 +771,8 @@ static HybridGaussianFactorGraph CreateFactorGraph( // Create HybridGaussianFactor std::vector factors{ - {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + {f0, ComputeLogNormalizerConstant(model0)}, + {f1, ComputeLogNormalizerConstant(model1)}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 133b165c1a..98bbd36d89 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -869,7 +869,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph( // Create HybridNonlinearFactor std::vector factors{ - {f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}}; + {f0, ComputeLogNormalizerConstant(model0)}, + {f1, ComputeLogNormalizerConstant(model1)}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index a586d119b7..d6526fb9cc 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -710,7 +710,7 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){ } // namespace noiseModel /* *******************************************************************************/ -double ComputeLogNormalizer( +double ComputeLogNormalizerConstant( const noiseModel::Gaussian::shared_ptr& noise_model) { // Since noise models are Gaussian, we can get the logDeterminant using // the same trick as in GaussianConditional @@ -725,7 +725,7 @@ double ComputeLogNormalizer( size_t n = noise_model->dim(); constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; + return 0.5*(n * log2pi + logDeterminantSigma); } } // gtsam diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index ffc1a3ebcd..d2ceb24dbe 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -757,10 +757,10 @@ namespace gtsam { * We compute this in the log-space for numerical accuracy. * * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. + * whose normalization constant we wish to compute. * @return double */ - GTSAM_EXPORT double ComputeLogNormalizer( + GTSAM_EXPORT double ComputeLogNormalizerConstant( const noiseModel::Gaussian::shared_ptr& noise_model); } //\ namespace gtsam diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 49874f901c..4248f5beba 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,11 +807,11 @@ TEST(NoiseModel, NonDiagonalGaussian) } } -TEST(NoiseModel, ComputeLogNormalizer) { +TEST(NoiseModel, ComputeLogNormalizerConstant) { // Very simple 1D noise model, which we can compute by hand. 
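  // [Editor's worked numbers, not part of the original patch: for
  //  sigma = 0.1, log(2*pi) ~= 1.837877 and log(sigma^2) = log(0.01)
  //  ~= -4.605170, so log(2*pi*sigma^2) ~= -2.767293.]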
double sigma = 0.1; auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = ComputeLogNormalizer(noise_model); + double actual_value = ComputeLogNormalizerConstant(noise_model); // Compute log(|2πΣ|) by hand. // = log(2π) + log(Σ) (since it is 1D) constexpr double log2pi = 1.8378770664093454835606594728112; @@ -821,7 +821,7 @@ TEST(NoiseModel, ComputeLogNormalizer) { // Similar situation in the 3D case size_t n = 3; auto noise_model2 = Isotropic::Sigma(n, sigma); - double actual_value2 = ComputeLogNormalizer(noise_model2); + double actual_value2 = ComputeLogNormalizerConstant(noise_model2); // We multiply by 3 due to the determinant double expected_value2 = n * (log2pi + log(sigma * sigma)); EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); From 4f888291bfa1dfba6d1b0c2a92ed90d459b620fe Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 16:11:28 -0400 Subject: [PATCH 23/28] fix docstring for HybridGaussianFactor --- gtsam/hybrid/HybridGaussianFactor.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index d724bdff3c..a86714863c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -45,7 +45,7 @@ using GaussianFactorValuePair = std::pair; * where the set of discrete variables indexes to * the continuous gaussian distribution. * - * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e., + * In factor graphs the error function typically returns 0.5*|A*x - b|^2, i.e., * the negative log-likelihood for a Gaussian noise model. * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on * the discrete assignment. From d60a253fcbac0574790d5f7e70fa09a6363f4a31 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 17:50:15 -0400 Subject: [PATCH 24/28] logNormalizationConstant is now a method for Gaussian noise model --- gtsam/linear/NoiseModel.cpp | 39 +++++++++++++-------------- gtsam/linear/NoiseModel.h | 27 ++++++++++--------- gtsam/linear/tests/testNoiseModel.cpp | 13 +++++---- 3 files changed, 39 insertions(+), 40 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index d6526fb9cc..af3081b9f2 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -238,6 +238,25 @@ void Gaussian::WhitenSystem(Matrix& A1, Matrix& A2, Matrix& A3, Vector& b) const Matrix Gaussian::information() const { return R().transpose() * R(); } +/* *******************************************************************************/ +double Gaussian::logNormalizationConstant() const { + // Since noise models are Gaussian, we can get the logDeterminant easily + // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} + // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) + // Hence, log det(Sigma)) = -2.0 * logDeterminant() + // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) + // = -0.5*n*log(2*pi) + logDeterminant() + double logDetR = + R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + + size_t n = dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + // Get 1/log(\sqrt(|2pi Sigma|)) = -0.5*log(|2pi Sigma|) + return -0.5 * n * log2pi + logDetR; +} + + /* ************************************************************************* */ // Diagonal /* ************************************************************************* */ @@ -708,24 +727,4 
@@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){ /* ************************************************************************* */ } // namespace noiseModel - -/* *******************************************************************************/ -double ComputeLogNormalizerConstant( - const noiseModel::Gaussian::shared_ptr& noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using - // the same trick as in GaussianConditional - // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} - // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) - // Hence, log det(Sigma)) = -2.0 * logDetR() - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return 0.5*(n * log2pi + logDeterminantSigma); -} - } // gtsam diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index d2ceb24dbe..b8e6c09bc7 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -266,7 +266,20 @@ namespace gtsam { /// Compute covariance matrix virtual Matrix covariance() const; - private: + /** + * @brief Helper method to compute the normalization constant + * for a Gaussian noise model. + * k = 1/log(\sqrt(|2πΣ|)) + * + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalization constant we wish to compute. + * @return double + */ + double logNormalizationConstant() const; + + private: #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION /** Serialization function */ friend class boost::serialization::access; @@ -751,18 +764,6 @@ namespace gtsam { template<> struct traits : public Testable {}; template<> struct traits : public Testable {}; - /** - * @brief Helper function to compute the log(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalization constant we wish to compute. - * @return double - */ - GTSAM_EXPORT double ComputeLogNormalizerConstant( - const noiseModel::Gaussian::shared_ptr& noise_model); - } //\ namespace gtsam diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 4248f5beba..b6e5785b62 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -811,19 +811,18 @@ TEST(NoiseModel, ComputeLogNormalizerConstant) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = ComputeLogNormalizerConstant(noise_model); - // Compute log(|2πΣ|) by hand. - // = log(2π) + log(Σ) (since it is 1D) - constexpr double log2pi = 1.8378770664093454835606594728112; - double expected_value = log2pi + log(sigma * sigma); + double actual_value = noise_model->logNormalizationConstant(); + // Compute 1/log(sqrt(|2πΣ|)) by hand. 
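  // [Editor's note: read as log(1/sqrt(|2πΣ|)) = -0.5*log(|2πΣ|), so]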
+ // = -0.5*(log(2π) + log(Σ)) (since it is 1D) + double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); // Similar situation in the 3D case size_t n = 3; auto noise_model2 = Isotropic::Sigma(n, sigma); - double actual_value2 = ComputeLogNormalizerConstant(noise_model2); + double actual_value2 = noise_model2->logNormalizationConstant(); // We multiply by 3 due to the determinant - double expected_value2 = n * (log2pi + log(sigma * sigma)); + double expected_value2 = -0.5 * n * log(2 * M_PI * sigma * sigma); EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); } From cea0dd577d087ec11e7585ea5f5b88970805447a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 19 Sep 2024 17:50:25 -0400 Subject: [PATCH 25/28] update tests --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 5 +++-- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 63b37e7d47..1ecd771325 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -770,9 +770,10 @@ static HybridGaussianFactorGraph CreateFactorGraph( ->linearize(values); // Create HybridGaussianFactor + // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) std::vector factors{ - {f0, ComputeLogNormalizerConstant(model0)}, - {f1, ComputeLogNormalizerConstant(model1)}}; + {f0, -2 * model0->logNormalizationConstant()}, + {f1, -2 * model1->logNormalizationConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 98bbd36d89..8d2a5a74b7 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -868,9 +868,10 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1); // Create HybridNonlinearFactor + // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) std::vector factors{ - {f0, ComputeLogNormalizerConstant(model0)}, - {f1, ComputeLogNormalizerConstant(model1)}}; + {f0, -2 * model0->logNormalizationConstant()}, + {f1, -2 * model1->logNormalizationConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); From 1ab82f382cf390704e7321336c174f4cf12dd945 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 03:26:07 -0400 Subject: [PATCH 26/28] hide sqrt(2*value) so the user doesn't have to premultiply by 2 --- gtsam/hybrid/HybridGaussianConditional.cpp | 3 +-- gtsam/hybrid/HybridGaussianFactor.cpp | 19 +++++++++++-------- .../hybrid/tests/testHybridGaussianFactor.cpp | 7 ++++--- .../tests/testHybridNonlinearFactorGraph.cpp | 7 ++++--- 4 files changed, 20 insertions(+), 16 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 79673d9f20..df59637aa5 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -222,8 +222,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( } else { // Add a constant to the likelihood in case the noise models // are not all equal. 
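      // [Editor's note, not part of the original patch:] the 2.0 factor is
      // dropped below because HybridGaussianFactor's augment() now writes
      // sqrt(2*value) into the extra b-row (see HybridGaussianFactor.cpp in
      // this same commit), so that row contributes 0.5*(sqrt(2*value))^2 ==
      // value, and callers pass the raw scalar instead of pre-doubling it.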
- double c = 2.0 * Cgm_Kgcm; - return {likelihood_m, c}; + return {likelihood_m, Cgm_Kgcm}; } }); return std::make_shared( diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 13b26db6c3..2fbd4bd888 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -46,31 +46,34 @@ HybridGaussianFactor::Factors augment( AlgebraicDecisionTree valueTree; std::tie(gaussianFactors, valueTree) = unzip(factors); - // Normalize + // Compute minimum value for normalization. double min_value = valueTree.min(); - AlgebraicDecisionTree values = - valueTree.apply([&min_value](double n) { return n - min_value; }); // Finally, update the [A|b] matrices. - auto update = [&values](const Assignment &assignment, - const HybridGaussianFactor::sharedFactor &gf) { + auto update = [&min_value](const GaussianFactorValuePair &gfv) { + auto [gf, value] = gfv; + auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; + + double normalized_value = value - min_value; + // If the value is 0, do nothing - if (values(assignment) == 0.0) return gf; + if (normalized_value == 0.0) return gf; GaussianFactorGraph gfg; gfg.push_back(jf); Vector c(1); - c << std::sqrt(values(assignment)); + // When hiding c inside the `b` vector, value == 0.5*c^2 + c << std::sqrt(2.0 * normalized_value); auto constantFactor = std::make_shared(c); gfg.push_back(constantFactor); return std::dynamic_pointer_cast( std::make_shared(gfg)); }; - return gaussianFactors.apply(update); + return HybridGaussianFactor::Factors(factors, update); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 1ecd771325..bfa2839837 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -770,10 +770,11 @@ static HybridGaussianFactorGraph CreateFactorGraph( ->linearize(values); // Create HybridGaussianFactor - // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) + // We take negative since we want + // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{ - {f0, -2 * model0->logNormalizationConstant()}, - {f1, -2 * model1->logNormalizationConstant()}}; + {f0, -model0->logNormalizationConstant()}, + {f1, -model1->logNormalizationConstant()}}; HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors); HybridGaussianFactorGraph hfg; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 8d2a5a74b7..621c8708ed 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -868,10 +868,11 @@ static HybridNonlinearFactorGraph CreateFactorGraph( std::make_shared>(X(0), X(1), means[1], model1); // Create HybridNonlinearFactor - // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|) + // We take negative since we want + // the underlying scalar to be log(\sqrt(|2πΣ|)) std::vector factors{ - {f0, -2 * model0->logNormalizationConstant()}, - {f1, -2 * model1->logNormalizationConstant()}}; + {f0, -model0->logNormalizationConstant()}, + {f1, -model1->logNormalizationConstant()}}; HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors); From 364b4b4a755cdb1c5299dc770cbade9357d24245 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 04:35:29 -0400 Subject: [PATCH 27/28] 
logDetR method which leverages noise model for efficiency. Build logDeterminant and logNormalizationConstant on top of it. --- gtsam/linear/NoiseModel.cpp | 37 +++++++++++++++++++++++++++++-------- gtsam/linear/NoiseModel.h | 28 +++++++++++++++++++--------- 2 files changed, 48 insertions(+), 17 deletions(-) diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index af3081b9f2..de69fdce97 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -239,21 +239,31 @@ void Gaussian::WhitenSystem(Matrix& A1, Matrix& A2, Matrix& A3, Vector& b) const Matrix Gaussian::information() const { return R().transpose() * R(); } /* *******************************************************************************/ -double Gaussian::logNormalizationConstant() const { +double Gaussian::logDetR() const { + double logDetR = + R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + return logDetR; +} + +/* *******************************************************************************/ +double Gaussian::logDeterminant() const { // Since noise models are Gaussian, we can get the logDeterminant easily // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) - // Hence, log det(Sigma)) = -2.0 * logDeterminant() - // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant()) - // = -0.5*n*log(2*pi) + logDeterminant() - double logDetR = - R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + // Hence, log det(Sigma)) = -2.0 * logDetR() + return -2.0 * logDetR(); +} +/* *******************************************************************************/ +double Gaussian::logNormalizationConstant() const { + // log(det(Sigma)) = -2.0 * logDetR + // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDetR()) + // = -0.5*n*log(2*pi) + (0.5*2.0 * logDetR()) + // = -0.5*n*log(2*pi) + logDetR() size_t n = dim(); constexpr double log2pi = 1.8378770664093454835606594728112; // Get 1/log(\sqrt(|2pi Sigma|)) = -0.5*log(|2pi Sigma|) - return -0.5 * n * log2pi + logDetR; + return -0.5 * n * log2pi + logDetR(); } @@ -333,6 +343,11 @@ void Diagonal::WhitenInPlace(Eigen::Block H) const { H = invsigmas().asDiagonal() * H; } +/* *******************************************************************************/ +double Diagonal::logDetR() const { + return invsigmas_.unaryExpr([](double x) { return log(x); }).sum(); +} + /* ************************************************************************* */ // Constrained /* ************************************************************************* */ @@ -661,6 +676,9 @@ void Isotropic::WhitenInPlace(Eigen::Block H) const { H *= invsigma_; } +/* *******************************************************************************/ +double Isotropic::logDetR() const { return log(invsigma_) * dim(); } + /* ************************************************************************* */ // Unit /* ************************************************************************* */ @@ -673,6 +691,9 @@ double Unit::squaredMahalanobisDistance(const Vector& v) const { return v.dot(v); } +/* *******************************************************************************/ +double Unit::logDetR() const { return 0.0; } + /* ************************************************************************* */ // Robust /* ************************************************************************* */ diff --git a/gtsam/linear/NoiseModel.h 
b/gtsam/linear/NoiseModel.h index b8e6c09bc7..91c11d4101 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -183,6 +183,8 @@ namespace gtsam { return *sqrt_information_; } + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const; public: @@ -266,15 +268,15 @@ namespace gtsam { /// Compute covariance matrix virtual Matrix covariance() const; + /// Compute the log of |Σ| + double logDeterminant() const; + /** - * @brief Helper method to compute the normalization constant - * for a Gaussian noise model. - * k = 1/log(\sqrt(|2πΣ|)) - * - * We compute this in the log-space for numerical accuracy. + * @brief Method to compute the normalization constant + * for a Gaussian noise model k = \sqrt(1/|2πΣ|). + * We compute this in the log-space for numerical accuracy, + * thus returning log(k). * - * @param noise_model The Gaussian noise model - * whose normalization constant we wish to compute. * @return double */ double logNormalizationConstant() const; @@ -308,11 +310,12 @@ namespace gtsam { */ Vector sigmas_, invsigmas_, precisions_; - protected: - /** constructor to allow for disabling initialization of invsigmas */ Diagonal(const Vector& sigmas); + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const override; + public: /** constructor - no initializations, for serialization */ Diagonal(); @@ -545,6 +548,9 @@ namespace gtsam { Isotropic(size_t dim, double sigma) : Diagonal(Vector::Constant(dim, sigma)),sigma_(sigma),invsigma_(1.0/sigma) {} + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const override; + public: /* dummy constructor to allow for serialization */ @@ -608,6 +614,10 @@ namespace gtsam { * Unit: i.i.d. unit-variance noise on all m dimensions. */ class GTSAM_EXPORT Unit : public Isotropic { + protected: + /// Compute the log of |R|. Used for computing log(|Σ|) + virtual double logDetR() const override; + public: typedef std::shared_ptr shared_ptr; From 67a8b8fea04541b8994d863457d36cdaaebc0f8d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 20 Sep 2024 04:35:37 -0400 Subject: [PATCH 28/28] comprehensive unit testing --- gtsam/linear/tests/testNoiseModel.cpp | 80 +++++++++++++++++++++++---- 1 file changed, 69 insertions(+), 11 deletions(-) diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index b6e5785b62..6aea154ee4 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,23 +807,81 @@ TEST(NoiseModel, NonDiagonalGaussian) } } -TEST(NoiseModel, ComputeLogNormalizerConstant) { +TEST(NoiseModel, LogNormalizationConstant1D) { // Very simple 1D noise model, which we can compute by hand. double sigma = 0.1; - auto noise_model = Isotropic::Sigma(1, sigma); - double actual_value = noise_model->logNormalizationConstant(); - // Compute 1/log(sqrt(|2πΣ|)) by hand. + // For expected values, we compute 1/log(sqrt(|2πΣ|)) by hand. 
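  // [Editor's note: read as log(1/sqrt(|2πΣ|)) = -0.5*log(|2πΣ|), hence]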
// = -0.5*(log(2π) + log(Σ)) (since it is 1D) double expected_value = -0.5 * log(2 * M_PI * sigma * sigma); - EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); - // Similar situation in the 3D case + // Gaussian + { + Matrix11 R; + R << 1 / sigma; + auto noise_model = Gaussian::SqrtInformation(R); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Diagonal + { + auto noise_model = Diagonal::Sigmas(Vector1(sigma)); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Isotropic + { + auto noise_model = Isotropic::Sigma(1, sigma); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Unit + { + auto noise_model = Unit::Create(1); + double actual_value = noise_model->logNormalizationConstant(); + double sigma = 1.0; + expected_value = -0.5 * log(2 * M_PI * sigma * sigma); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } +} + +TEST(NoiseModel, LogNormalizationConstant3D) { + // Simple 3D noise model, which we can compute by hand. + double sigma = 0.1; size_t n = 3; - auto noise_model2 = Isotropic::Sigma(n, sigma); - double actual_value2 = noise_model2->logNormalizationConstant(); - // We multiply by 3 due to the determinant - double expected_value2 = -0.5 * n * log(2 * M_PI * sigma * sigma); - EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); + // We compute the expected values just like in the LogNormalizationConstant1D + // test, but we multiply by 3 due to the determinant. + double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + + // Gaussian + { + Matrix33 R; + R << 1 / sigma, 2, 3, // + 0, 1 / sigma, 4, // + 0, 0, 1 / sigma; + auto noise_model = Gaussian::SqrtInformation(R); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Diagonal + { + auto noise_model = Diagonal::Sigmas(Vector3(sigma, sigma, sigma)); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Isotropic + { + auto noise_model = Isotropic::Sigma(n, sigma); + double actual_value = noise_model->logNormalizationConstant(); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } + // Unit + { + auto noise_model = Unit::Create(3); + double actual_value = noise_model->logNormalizationConstant(); + double sigma = 1.0; + expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + } } /* ************************************************************************* */
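/* [Editor's worked check, not part of the original patch:] for sigma = 0.1,
   log(2*pi) ~= 1.837877 and log(sigma^2) = 2*log(0.1) ~= -4.605170, so the
   1D constant is -0.5*(1.837877 - 4.605170) ~= 1.383647, and the 3D constant
   is three times that, ~= 4.150940; these are the closed-form values that the
   EXPECT_DOUBLES_EQUAL checks above compare against with tolerance 1e-9. */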