From 9f7ccbb33c29a69459054ddfa0e61ca0abcd4526 Mon Sep 17 00:00:00 2001
From: Frank Dellaert
Date: Tue, 8 Oct 2024 17:57:53 +0900
Subject: [PATCH] Minimize formatting changes

---
 gtsam/hybrid/HybridGaussianConditional.cpp    | 154 +++++++++---------
 gtsam/hybrid/HybridGaussianFactor.cpp         |  54 +++---
 gtsam/hybrid/HybridGaussianFactor.h           |   2 +-
 gtsam/hybrid/HybridGaussianFactorGraph.cpp    | 149 +++++++++--------
 gtsam/hybrid/HybridGaussianProductFactor.cpp  |   9 +-
 gtsam/hybrid/HybridGaussianProductFactor.h    |  30 ++--
 gtsam/hybrid/HybridNonlinearFactorGraph.cpp   |   6 +-
 gtsam/hybrid/HybridNonlinearFactorGraph.h     |   4 +-
 gtsam/hybrid/tests/testGaussianMixture.cpp    |  30 ++--
 gtsam/hybrid/tests/testHybridBayesNet.cpp     |  88 ++++++----
 gtsam/hybrid/tests/testHybridBayesTree.cpp    |  46 +++---
 gtsam/hybrid/tests/testHybridEstimation.cpp   |   2 -
 .../tests/testHybridGaussianConditional.cpp   |  87 +++++-----
 .../tests/testHybridGaussianFactorGraph.cpp   | 152 +++++++++--------
 .../tests/testHybridGaussianProductFactor.cpp |   7 +-
 gtsam/hybrid/tests/testHybridMotionModel.cpp  |  73 +++++----
 .../tests/testHybridNonlinearFactorGraph.cpp  |   1 -
 17 files changed, 509 insertions(+), 385 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp
index c3c1b893ec..4dcb840341 100644
--- a/gtsam/hybrid/HybridGaussianConditional.cpp
+++ b/gtsam/hybrid/HybridGaussianConditional.cpp
@@ -40,7 +40,8 @@ namespace gtsam {
  * - nrFrontals: Optional size_t for number of frontal variables
  * - pairs: FactorValuePairs for storing conditionals with their negLogConstant
  * - conditionals: Conditionals for storing conditionals. TODO(frank): kill!
- * - minNegLogConstant: minimum negLogConstant, computed here, subtracted in constructor
+ * - minNegLogConstant: minimum negLogConstant, computed here, subtracted in
+ * constructor
  */
 struct HybridGaussianConditional::Helper {
   std::optional<size_t> nrFrontals;
@@ -53,7 +54,7 @@ struct HybridGaussianConditional::Helper {
 
   /// Construct from a vector of mean and sigma pairs, plus extra args.
   template <typename P, typename... Args>
-  explicit Helper(const DiscreteKey& mode, const P& p, Args&&... args) {
+  explicit Helper(const DiscreteKey &mode, const P &p, Args &&...args) {
     nrFrontals = 1;
     minNegLogConstant = std::numeric_limits<double>::infinity();
 
@@ -61,8 +62,9 @@ struct HybridGaussianConditional::Helper {
     std::vector<GC::shared_ptr> gcs;
     fvs.reserve(p.size());
     gcs.reserve(p.size());
-    for (auto&& [mean, sigma] : p) {
-      auto gaussianConditional = GC::sharedMeanAndStddev(std::forward<Args>(args)..., mean, sigma);
+    for (auto &&[mean, sigma] : p) {
+      auto gaussianConditional =
+          GC::sharedMeanAndStddev(std::forward<Args>(args)..., mean, sigma);
       double value = gaussianConditional->negLogConstant();
       minNegLogConstant = std::min(minNegLogConstant, value);
       fvs.emplace_back(gaussianConditional, value);
@@ -74,8 +76,9 @@ struct HybridGaussianConditional::Helper {
   }
 
   /// Construct from tree of GaussianConditionals.
-  explicit Helper(const Conditionals& conditionals)
-      : conditionals(conditionals), minNegLogConstant(std::numeric_limits<double>::infinity()) {
+  explicit Helper(const Conditionals &conditionals)
+      : conditionals(conditionals),
+        minNegLogConstant(std::numeric_limits<double>::infinity()) {
     auto func = [this](const GC::shared_ptr& gc) -> GaussianFactorValuePair {
       if (!gc) return {nullptr, std::numeric_limits<double>::infinity()};
       if (!nrFrontals) nrFrontals = gc->nrFrontals();
@@ -93,67 +96,63 @@ struct HybridGaussianConditional::Helper {
 };
 
 /* *******************************************************************************/
-HybridGaussianConditional::HybridGaussianConditional(const DiscreteKeys& discreteParents,
-                                                     const Helper& helper)
-    : BaseFactor(
-          discreteParents,
-          FactorValuePairs(
-              helper.pairs,
-              [&](const GaussianFactorValuePair& pair) {  // subtract minNegLogConstant
-                return GaussianFactorValuePair{pair.first, pair.second - helper.minNegLogConstant};
-              })),
+HybridGaussianConditional::HybridGaussianConditional(
+    const DiscreteKeys& discreteParents, const Helper& helper)
+    : BaseFactor(discreteParents,
+                 FactorValuePairs(helper.pairs,
+                                  [&](const GaussianFactorValuePair&
+                                          pair) {  // subtract minNegLogConstant
+                                    return GaussianFactorValuePair{
+                                        pair.first,
+                                        pair.second - helper.minNegLogConstant};
+                                  })),
       BaseConditional(*helper.nrFrontals),
       conditionals_(helper.conditionals),
       negLogConstant_(helper.minNegLogConstant) {}
 
 HybridGaussianConditional::HybridGaussianConditional(
-    const DiscreteKey& discreteParent,
-    const std::vector<GaussianConditional::shared_ptr>& conditionals)
+    const DiscreteKey &discreteParent,
+    const std::vector<GaussianConditional::shared_ptr> &conditionals)
     : HybridGaussianConditional(DiscreteKeys{discreteParent},
                                 Conditionals({discreteParent}, conditionals)) {}
 
 HybridGaussianConditional::HybridGaussianConditional(
-    const DiscreteKey& discreteParent,
-    Key key,  //
-    const std::vector<std::pair<Vector, double>>& parameters)
+    const DiscreteKey &discreteParent, Key key,  //
+    const std::vector<std::pair<Vector, double>> &parameters)
     : HybridGaussianConditional(DiscreteKeys{discreteParent},
                                 Helper(discreteParent, parameters, key)) {}
 
 HybridGaussianConditional::HybridGaussianConditional(
-    const DiscreteKey& discreteParent,
-    Key key,  //
-    const Matrix& A,
-    Key parent,
-    const std::vector<std::pair<Vector, double>>& parameters)
-    : HybridGaussianConditional(DiscreteKeys{discreteParent},
-                                Helper(discreteParent, parameters, key, A, parent)) {}
+    const DiscreteKey &discreteParent, Key key,  //
+    const Matrix &A, Key parent,
+    const std::vector<std::pair<Vector, double>> &parameters)
+    : HybridGaussianConditional(
+          DiscreteKeys{discreteParent},
+          Helper(discreteParent, parameters, key, A, parent)) {}
 
 HybridGaussianConditional::HybridGaussianConditional(
-    const DiscreteKey& discreteParent,
-    Key key,  //
-    const Matrix& A1,
-    Key parent1,
-    const Matrix& A2,
-    Key parent2,
-    const std::vector<std::pair<Vector, double>>& parameters)
-    : HybridGaussianConditional(DiscreteKeys{discreteParent},
-                                Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {
-}
+    const DiscreteKey &discreteParent, Key key,  //
+    const Matrix &A1, Key parent1, const Matrix &A2, Key parent2,
+    const std::vector<std::pair<Vector, double>> &parameters)
+    : HybridGaussianConditional(
+          DiscreteKeys{discreteParent},
+          Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {}
 
 HybridGaussianConditional::HybridGaussianConditional(
-    const DiscreteKeys& discreteParents,
-    const HybridGaussianConditional::Conditionals& conditionals)
+    const DiscreteKeys &discreteParents,
+    const HybridGaussianConditional::Conditionals &conditionals)
     : HybridGaussianConditional(discreteParents, Helper(conditionals)) {}
 /* *******************************************************************************/
-const HybridGaussianConditional::Conditionals& HybridGaussianConditional::conditionals() const {
+const HybridGaussianConditional::Conditionals &
+HybridGaussianConditional::conditionals() const {
   return conditionals_;
 }
 
 /* *******************************************************************************/
 size_t HybridGaussianConditional::nrComponents() const {
   size_t total = 0;
-  conditionals_.visit([&total](const GaussianFactor::shared_ptr& node) {
+  conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) {
     if (node) total += 1;
   });
   return total;
@@ -161,19 +160,21 @@ size_t HybridGaussianConditional::nrComponents() const {
 
 /* *******************************************************************************/
 GaussianConditional::shared_ptr HybridGaussianConditional::choose(
-    const DiscreteValues& discreteValues) const {
-  auto& ptr = conditionals_(discreteValues);
+    const DiscreteValues &discreteValues) const {
+  auto &ptr = conditionals_(discreteValues);
   if (!ptr) return nullptr;
   auto conditional = std::dynamic_pointer_cast<GaussianConditional>(ptr);
   if (conditional)
     return conditional;
   else
-    throw std::logic_error("A HybridGaussianConditional unexpectedly contained a non-conditional");
+    throw std::logic_error(
+        "A HybridGaussianConditional unexpectedly contained a non-conditional");
 }
 
 /* *******************************************************************************/
-bool HybridGaussianConditional::equals(const HybridFactor& lf, double tol) const {
-  const This* e = dynamic_cast<const This*>(&lf);
+bool HybridGaussianConditional::equals(const HybridFactor &lf,
+                                       double tol) const {
+  const This *e = dynamic_cast<const This *>(&lf);
   if (e == nullptr) return false;
 
   // This will return false if either conditionals_ is empty or e->conditionals_
@@ -182,26 +183,27 @@ bool HybridGaussianConditional::equals(const HybridFactor& lf, double tol) const
 
   // Check the base and the factors:
   return BaseFactor::equals(*e, tol) &&
-         conditionals_.equals(e->conditionals_, [tol](const auto& f1, const auto& f2) {
-           return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
-         });
+         conditionals_.equals(
+             e->conditionals_, [tol](const auto &f1, const auto &f2) {
+               return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
+             });
 }
 
 /* *******************************************************************************/
-void HybridGaussianConditional::print(const std::string& s, const KeyFormatter& formatter) const {
+void HybridGaussianConditional::print(const std::string &s,
+                                      const KeyFormatter &formatter) const {
   std::cout << (s.empty() ? "" : s + "\n");
"" : s + "\n"); BaseConditional::print("", formatter); std::cout << " Discrete Keys = "; - for (auto& dk : discreteKeys()) { + for (auto &dk : discreteKeys()) { std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << std::endl << " logNormalizationConstant: " << -negLogConstant() << std::endl << std::endl; conditionals_.print( - "", - [&](Key k) { return formatter(k); }, - [&](const GaussianConditional::shared_ptr& gf) -> std::string { + "", [&](Key k) { return formatter(k); }, + [&](const GaussianConditional::shared_ptr &gf) -> std::string { RedirectCout rd; if (gf && !gf->empty()) { gf->print("", formatter); @@ -218,19 +220,20 @@ KeyVector HybridGaussianConditional::continuousParents() const { const auto range = parents(); KeyVector continuousParentKeys(range.begin(), range.end()); // Loop over all discrete keys: - for (const auto& discreteKey : discreteKeys()) { + for (const auto &discreteKey : discreteKeys()) { const Key key = discreteKey.first; // remove that key from continuousParentKeys: - continuousParentKeys.erase( - std::remove(continuousParentKeys.begin(), continuousParentKeys.end(), key), + continuousParentKeys.erase(std::remove(continuousParentKeys.begin(), + continuousParentKeys.end(), key), continuousParentKeys.end()); } return continuousParentKeys; } /* ************************************************************************* */ -bool HybridGaussianConditional::allFrontalsGiven(const VectorValues& given) const { - for (auto&& kv : given) { +bool HybridGaussianConditional::allFrontalsGiven( + const VectorValues &given) const { + for (auto &&kv : given) { if (given.find(kv.first) == given.end()) { return false; } @@ -240,7 +243,7 @@ bool HybridGaussianConditional::allFrontalsGiven(const VectorValues& given) cons /* ************************************************************************* */ std::shared_ptr HybridGaussianConditional::likelihood( - const VectorValues& given) const { + const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( "HybridGaussianConditional::likelihood: given values are missing some " @@ -251,29 +254,32 @@ std::shared_ptr HybridGaussianConditional::likelihood( const KeyVector continuousParentKeys = continuousParents(); const HybridGaussianFactor::FactorValuePairs likelihoods( conditionals_, - [&](const GaussianConditional::shared_ptr& conditional) -> GaussianFactorValuePair { + [&](const GaussianConditional::shared_ptr &conditional) + -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_; return {likelihood_m, Cgm_Kgcm}; }); - return std::make_shared(discreteParentKeys, likelihoods); + return std::make_shared(discreteParentKeys, + likelihoods); } /* ************************************************************************* */ -std::set DiscreteKeysAsSet(const DiscreteKeys& discreteKeys) { +std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { std::set s(discreteKeys.begin(), discreteKeys.end()); return s; } /* *******************************************************************************/ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune( - const DecisionTreeFactor& discreteProbs) const { + const DecisionTreeFactor &discreteProbs) const { // Find keys in discreteProbs.keys() but not in this->keys(): std::set mine(this->keys().begin(), this->keys().end()); - std::set theirs(discreteProbs.keys().begin(), discreteProbs.keys().end()); + std::set 
+                       discreteProbs.keys().end());
   std::vector<Key> diff;
-  std::set_difference(
-      theirs.begin(), theirs.end(), mine.begin(), mine.end(), std::back_inserter(diff));
+  std::set_difference(theirs.begin(), theirs.end(), mine.begin(), mine.end(),
+                      std::back_inserter(diff));
 
   // Find maximum probability value for every combination of our keys.
   Ordering keys(diff);
@@ -281,24 +287,26 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
 
   // Check the max value for every combination of our keys.
   // If the max value is 0.0, we can prune the corresponding conditional.
-  auto pruner =
-      [&](const Assignment<Key>& choices,
-          const GaussianConditional::shared_ptr& conditional) -> GaussianConditional::shared_ptr {
+  auto pruner = [&](const Assignment<Key> &choices,
+                    const GaussianConditional::shared_ptr &conditional)
+      -> GaussianConditional::shared_ptr {
     return (max->evaluate(choices) == 0.0) ? nullptr : conditional;
   };
 
   auto pruned_conditionals = conditionals_.apply(pruner);
-  return std::make_shared<HybridGaussianConditional>(discreteKeys(), pruned_conditionals);
+  return std::make_shared<HybridGaussianConditional>(discreteKeys(),
+                                                     pruned_conditionals);
 }
 
 /* *******************************************************************************/
-double HybridGaussianConditional::logProbability(const HybridValues& values) const {
+double HybridGaussianConditional::logProbability(
+    const HybridValues &values) const {
   auto conditional = conditionals_(values.discrete());
   return conditional->logProbability(values.continuous());
 }
 
 /* *******************************************************************************/
-double HybridGaussianConditional::evaluate(const HybridValues& values) const {
+double HybridGaussianConditional::evaluate(const HybridValues &values) const {
   auto conditional = conditionals_(values.discrete());
   return conditional->evaluate(values.continuous());
 }
diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp
index 44a56910ed..afc818c4a7 100644
--- a/gtsam/hybrid/HybridGaussianFactor.cpp
+++ b/gtsam/hybrid/HybridGaussianFactor.cpp
@@ -50,12 +50,15 @@ struct HybridGaussianFactor::ConstructorHelper {
     }
     // Build the FactorValuePairs DecisionTree
     pairs = FactorValuePairs(
-        DecisionTree<Key, GaussianFactor::shared_ptr>(discreteKeys, factors), [](const auto& f) {
-          return std::pair{f, f ? 0.0 : std::numeric_limits<double>::infinity()};
+        DecisionTree<Key, GaussianFactor::shared_ptr>(discreteKeys, factors),
+        [](const auto& f) {
+          return std::pair{f,
+                           f ? 0.0 : std::numeric_limits<double>::infinity()};
         });
   }
 
-  /// Constructor for a single discrete key and a vector of GaussianFactorValuePairs
+  /// Constructor for a single discrete key and a vector of
+  /// GaussianFactorValuePairs
   ConstructorHelper(const DiscreteKey& discreteKey,
                     const std::vector<GaussianFactorValuePair>& factorPairs)
       : discreteKeys({discreteKey}) {
@@ -71,8 +74,10 @@ struct HybridGaussianFactor::ConstructorHelper {
     pairs = FactorValuePairs(discreteKeys, factorPairs);
   }
 
-  /// Constructor for a vector of discrete keys and a vector of GaussianFactorValuePairs
-  ConstructorHelper(const DiscreteKeys& discreteKeys, const FactorValuePairs& factorPairs)
+  /// Constructor for a vector of discrete keys and a vector of
+  /// GaussianFactorValuePairs
+  ConstructorHelper(const DiscreteKeys& discreteKeys,
+                    const FactorValuePairs& factorPairs)
       : discreteKeys(discreteKeys) {
     // Extract continuous keys from the first non-null factor
     // TODO: just stop after first non-null factor
@@ -88,24 +93,27 @@ struct HybridGaussianFactor::ConstructorHelper {
 };
 
 /* *******************************************************************************/
-HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper& helper)
-    : Base(helper.continuousKeys, helper.discreteKeys), factors_(helper.pairs) {}
+HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper)
+    : Base(helper.continuousKeys, helper.discreteKeys),
+      factors_(helper.pairs) {}
 
 HybridGaussianFactor::HybridGaussianFactor(
-    const DiscreteKey& discreteKey, const std::vector<GaussianFactor::shared_ptr>& factorPairs)
-    : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {}
+    const DiscreteKey &discreteKey,
+    const std::vector<GaussianFactor::shared_ptr> &factors)
+    : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {}
 
-HybridGaussianFactor::HybridGaussianFactor(const DiscreteKey& discreteKey,
-                                           const std::vector<GaussianFactorValuePair>& factorPairs)
+HybridGaussianFactor::HybridGaussianFactor(
+    const DiscreteKey &discreteKey,
+    const std::vector<GaussianFactorValuePair> &factorPairs)
     : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {}
 
-HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys& discreteKeys,
-                                           const FactorValuePairs& factorPairs)
-    : HybridGaussianFactor(ConstructorHelper(discreteKeys, factorPairs)) {}
+HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys,
+                                           const FactorValuePairs &factors)
+    : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {}
 
 /* *******************************************************************************/
-bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const {
-  const This* e = dynamic_cast<const This*>(&lf);
+bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
+  const This *e = dynamic_cast<const This *>(&lf);
   if (e == nullptr) return false;
 
   // This will return false if either factors_ is empty or e->factors_ is
@@ -122,7 +130,8 @@ bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const {
 }
 
 /* *******************************************************************************/
-void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& formatter) const {
+void HybridGaussianFactor::print(const std::string &s,
+                                 const KeyFormatter &formatter) const {
   std::cout << (s.empty() ? "" : s + "\n");
"" : s + "\n"); HybridFactor::print("", formatter); std::cout << "{\n"; @@ -148,7 +157,8 @@ void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& forma } /* *******************************************************************************/ -GaussianFactorValuePair HybridGaussianFactor::operator()(const DiscreteValues& assignment) const { +GaussianFactorValuePair HybridGaussianFactor::operator()( + const DiscreteValues &assignment) const { return factors_(assignment); } @@ -156,15 +166,17 @@ GaussianFactorValuePair HybridGaussianFactor::operator()(const DiscreteValues& a HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { // Implemented by creating a new DecisionTree where: // - The structure (keys and assignments) is preserved from factors_ - // - Each leaf converted to a GaussianFactorGraph with just the factor and its scalar. - return {{factors_, [](const auto& pair) -> std::pair { + // - Each leaf converted to a GaussianFactorGraph with just the factor and its + // scalar. + return {{factors_, + [](const auto& pair) -> std::pair { return {GaussianFactorGraph{pair.first}, pair.second}; }}}; } /* *******************************************************************************/ AlgebraicDecisionTree HybridGaussianFactor::errorTree( - const VectorValues& continuousValues) const { + const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. auto errorFunc = [&continuousValues](const auto& pair) { return pair.first ? pair.first->error(continuousValues) + pair.second diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 08debcf48f..48a8bf48de 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -191,4 +191,4 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> struct traits : public Testable {}; -} // namespace gtsam +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 2d90f2dea9..26cdb8dd2c 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -55,21 +55,23 @@ template class EliminateableFactorGraph; using std::dynamic_pointer_cast; using OrphanWrapper = BayesTreeOrphanWrapper; -using Result = std::pair, GaussianFactor::shared_ptr>; +using Result = + std::pair, GaussianFactor::shared_ptr>; using ResultTree = DecisionTree>; static const VectorValues kEmpty; /* ************************************************************************ */ // Throw a runtime exception for method specified in string s, and factor f: -static void throwRuntimeError(const std::string& s, const std::shared_ptr& f) { +static void throwRuntimeError(const std::string& s, + const std::shared_ptr& f) { auto& fr = *f; - throw std::runtime_error(s + " not implemented for factor type " + demangle(typeid(fr).name()) + - "."); + throw std::runtime_error(s + " not implemented for factor type " + + demangle(typeid(fr).name()) + "."); } /* ************************************************************************ */ -const Ordering HybridOrdering(const HybridGaussianFactorGraph& graph) { +const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) { KeySet discrete_keys = graph.discreteKeySet(); const VariableIndex index(graph); return Ordering::ColamdConstrainedLast( @@ -77,20 +79,21 @@ const Ordering HybridOrdering(const HybridGaussianFactorGraph& graph) { } /* 
 /* ************************************************************************ */
-static void printFactor(const std::shared_ptr<Factor>& factor,
-                        const DiscreteValues& assignment,
-                        const KeyFormatter& keyFormatter) {
-  if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
+static void printFactor(const std::shared_ptr<Factor> &factor,
+                        const DiscreteValues &assignment,
+                        const KeyFormatter &keyFormatter) {
+  if (auto hgf = dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
     if (assignment.empty())
       hgf->print("HybridGaussianFactor:", keyFormatter);
     else
-      hgf->operator()(assignment).first->print("HybridGaussianFactor, component:", keyFormatter);
-  } else if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
+      hgf->operator()(assignment)
+          .first->print("HybridGaussianFactor, component:", keyFormatter);
+  } else if (auto gf = dynamic_pointer_cast<GaussianFactor>(factor)) {
     factor->print("GaussianFactor:\n", keyFormatter);
-  } else if (auto df = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
+  } else if (auto df = dynamic_pointer_cast<DiscreteFactor>(factor)) {
     factor->print("DiscreteFactor:\n", keyFormatter);
-  } else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) {
+  } else if (auto hc = dynamic_pointer_cast<HybridConditional>(factor)) {
     if (hc->isContinuous()) {
       factor->print("GaussianConditional:\n", keyFormatter);
     } else if (hc->isDiscrete()) {
@@ -99,7 +102,9 @@ static void printFactor(const std::shared_ptr<Factor>& factor,
       if (assignment.empty())
         hc->print("HybridConditional:", keyFormatter);
       else
-        hc->asHybrid()->choose(assignment)->print("HybridConditional, component:\n", keyFormatter);
+        hc->asHybrid()
+            ->choose(assignment)
+            ->print("HybridConditional, component:\n", keyFormatter);
     }
   } else {
     factor->print("Unknown factor type\n", keyFormatter);
@@ -128,15 +133,15 @@ void HybridGaussianFactorGraph::print(const std::string& s,
 
 /* ************************************************************************ */
 void HybridGaussianFactorGraph::printErrors(
-    const HybridValues& values,
-    const std::string& str,
-    const KeyFormatter& keyFormatter,
-    const std::function<bool(const Factor* /*factor*/, double /*whitenedError*/, size_t /*index*/)>&
-        printCondition) const {
-  std::cout << str << " size: " << size() << std::endl << std::endl;
+    const HybridValues &values, const std::string &str,
+    const KeyFormatter &keyFormatter,
+    const std::function<bool(const Factor* /*factor*/, double /*whitenedError*/, size_t /*index*/)>
+        &printCondition) const {
+  std::cout << str << "size: " << size() << std::endl << std::endl;
 
   for (size_t i = 0; i < factors_.size(); i++) {
-    auto&& factor = factors_[i];
+    auto &&factor = factors_[i];
     if (factor == nullptr) {
       std::cout << "Factor " << i << ": nullptr\n";
       continue;
@@ -154,7 +159,8 @@ void HybridGaussianFactorGraph::printErrors(
 }
 
 /* ************************************************************************ */
-HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() const {
+HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor()
+    const {
   HybridGaussianProductFactor result;
 
   for (auto& f : factors_) {
@@ -194,10 +200,11 @@ HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() co
 }
 
 /* ************************************************************************ */
-static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> continuousElimination(
-    const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) {
+static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
+continuousElimination(const HybridGaussianFactorGraph &factors,
+                      const Ordering &frontalKeys) {
   GaussianFactorGraph gfg;
-  for (auto& f : factors) {
+  for (auto &f : factors) {
     if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
       gfg.push_back(gf);
     } else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
@@ -217,20 +224,20 @@ static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> continu
 }
 
 /* ************************************************************************ */
-static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> discreteElimination(
-    const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) {
+static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
+discreteElimination(const HybridGaussianFactorGraph &factors,
+                    const Ordering &frontalKeys) {
   DiscreteFactorGraph dfg;
 
-  for (auto& f : factors) {
+  for (auto &f : factors) {
     if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
       dfg.push_back(df);
     } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
       // Case where we have a HybridGaussianFactor with no continuous keys.
       // In this case, compute discrete probabilities.
-      // TODO(frank): What about the scalar!?
       auto potential = [&](const auto& pair) -> double {
         auto [factor, scalar] = pair;
-        // If the factor is null, it has been pruned, hence return potential of zero
+        // If factor is null, it has been pruned, hence return potential zero
         if (!factor) return 0.0;
         return exp(-scalar - factor->error(kEmpty));
       };
@@ -262,9 +269,10 @@ static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> discret
  * The residual error contains no keys, and only
 * depends on the discrete separator if present.
 */
-static std::shared_ptr<Factor> createDiscreteFactor(const ResultTree& eliminationResults,
-                                                    const DiscreteKeys& discreteSeparator) {
-  auto potential = [&](const auto& pair) -> double {
+static std::shared_ptr<Factor> createDiscreteFactor(
+    const ResultTree& eliminationResults,
+    const DiscreteKeys &discreteSeparator) {
+  auto potential = [&](const auto &pair) -> double {
     const auto& [conditional, factor] = pair.first;
     const double scalar = pair.second;
     if (conditional && factor) {
@@ -276,7 +284,8 @@ static std::shared_ptr<Factor> createDiscreteFactor(const ResultTree& eliminatio
       const double error = scalar + factor->error(kEmpty) - negLogK;
       return exp(-error);
     } else if (!conditional && !factor) {
-      // If the factor is null, it has been pruned, hence return potential of zero
+      // If the factor is null, it has been pruned, hence return potential of
+      // zero
       return 0.0;
     } else {
       throw std::runtime_error("createDiscreteFactor has mixed NULLs");
@@ -290,11 +299,12 @@ static std::shared_ptr<Factor> createDiscreteFactor(const ResultTree& eliminatio
 
 /* *******************************************************************************/
 // Create HybridGaussianFactor on the separator, taking care to correct
 // for conditional constants.
-static std::shared_ptr<Factor> createHybridGaussianFactor(const ResultTree& eliminationResults,
-                                                          const DiscreteKeys& discreteSeparator) {
+static std::shared_ptr<Factor> createHybridGaussianFactor(
+    const ResultTree &eliminationResults,
+    const DiscreteKeys &discreteSeparator) {
   // Correct for the normalization constant used up by the conditional
-  auto correct = [&](const auto& pair) -> GaussianFactorValuePair {
-    const auto& [conditional, factor] = pair.first;
+  auto correct = [&](const auto &pair) -> GaussianFactorValuePair {
+    const auto &[conditional, factor] = pair.first;
     const double scalar = pair.second;
     if (conditional && factor) {
       const double negLogK = conditional->negLogConstant();
@@ -305,29 +315,32 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(const ResultTree& elim
       throw std::runtime_error("createHybridGaussianFactors has mixed NULLs");
     }
   };
-  DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults, correct);
+  DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
+                                                        correct);
 
   return std::make_shared<HybridGaussianFactor>(discreteSeparator, newFactors);
 }
 
 /* *******************************************************************************/
 /// Get the discrete keys from the HybridGaussianFactorGraph as DiscreteKeys.
-static auto GetDiscreteKeys = [](const HybridGaussianFactorGraph& hfg) -> DiscreteKeys {
+static auto GetDiscreteKeys =
+    [](const HybridGaussianFactorGraph &hfg) -> DiscreteKeys {
   const std::set<DiscreteKey> discreteKeySet = hfg.discreteKeys();
   return {discreteKeySet.begin(), discreteKeySet.end()};
 };
 
 /* *******************************************************************************/
 std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
-HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
+HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
   // Since we eliminate all continuous variables first,
   // the discrete separator will be *all* the discrete keys.
   DiscreteKeys discreteSeparator = GetDiscreteKeys(*this);
 
   // Collect all the factors to create a set of Gaussian factor graphs in a
-  // decision tree indexed by all discrete keys involved. Just like any hybrid factor, every
-  // assignment also has a scalar error, in this case the sum of all errors in the graph. This error
-  // is assignment-specific and accounts for any difference in noise models used.
+  // decision tree indexed by all discrete keys involved. Just like any hybrid
+  // factor, every assignment also has a scalar error, in this case the sum of
+  // all errors in the graph. This error is assignment-specific and accounts for
+  // any difference in noise models used.
   HybridGaussianProductFactor productFactor = collectProductFactor();
 
   // Convert factor graphs with a nullptr to an empty factor graph.
@@ -337,8 +350,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
 
   // This is the elimination method on the leaf nodes
   bool someContinuousLeft = false;
-  auto eliminate =
-      [&](const std::pair<GaussianFactorGraph, double>& pair) -> std::pair<Result, double> {
+  auto eliminate = [&](const std::pair<GaussianFactorGraph, double>& pair)
+      -> std::pair<Result, double> {
     const auto& [graph, scalar] = pair;
 
     if (graph.empty()) {
@@ -346,7 +359,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
     }
 
     // Expensive elimination of product factor.
-    auto result = EliminatePreferCholesky(graph, keys);  /// <<<<<< MOST COMPUTE IS HERE
+    auto result =
+        EliminatePreferCholesky(graph, keys);  /// <<<<<< MOST COMPUTE IS HERE
 
     // Record whether there any continuous variables left
     someContinuousLeft |= !result.second->empty();
@@ -361,15 +375,16 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
   // If there are no more continuous parents we create a DiscreteFactor with the
   // error for each discrete choice. Otherwise, create a HybridGaussianFactor
   // on the separator, taking care to correct for conditional constants.
-  auto newFactor = someContinuousLeft
-                       ? createHybridGaussianFactor(eliminationResults, discreteSeparator)
-                       : createDiscreteFactor(eliminationResults, discreteSeparator);
+  auto newFactor =
+      someContinuousLeft
+          ? createHybridGaussianFactor(eliminationResults, discreteSeparator)
+          : createDiscreteFactor(eliminationResults, discreteSeparator);
 
   // Create the HybridGaussianConditional from the conditionals
   HybridGaussianConditional::Conditionals conditionals(
       eliminationResults, [](const auto& pair) { return pair.first.first; });
-  auto hybridGaussian =
-      std::make_shared<HybridGaussianConditional>(discreteSeparator, conditionals);
+  auto hybridGaussian = std::make_shared<HybridGaussianConditional>(
+      discreteSeparator, conditionals);
 
   return {std::make_shared<HybridConditional>(hybridGaussian), newFactor};
 }
@@ -389,7 +404,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
 * be INCORRECT and there will be NO error raised.
 */
std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>  //
-EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) {
+EliminateHybrid(const HybridGaussianFactorGraph &factors,
+                const Ordering &keys) {
   // NOTE: Because we are in the Conditional Gaussian regime there are only
   // a few cases:
   // 1. continuous variable, make a hybrid Gaussian conditional if there are
@@ -440,7 +456,7 @@ EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys)
   // 3. if not, we do hybrid elimination:
 
   bool only_discrete = true, only_continuous = true;
-  for (auto&& factor : factors) {
+  for (auto &&factor : factors) {
    if (auto hybrid_factor = std::dynamic_pointer_cast<HybridFactor>(factor)) {
       if (hybrid_factor->isDiscrete()) {
         only_continuous = false;
@@ -451,9 +467,11 @@ EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys)
         only_discrete = false;
         break;
       }
-    } else if (auto cont_factor = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
+    } else if (auto cont_factor =
+                   std::dynamic_pointer_cast<GaussianFactor>(factor)) {
       only_discrete = false;
-    } else if (auto discrete_factor = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
+    } else if (auto discrete_factor =
+                   std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
       only_continuous = false;
     }
   }
@@ -474,17 +492,17 @@ EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys)
 
 /* ************************************************************************ */
 AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
-    const VectorValues& continuousValues) const {
+    const VectorValues &continuousValues) const {
   AlgebraicDecisionTree<Key> result(0.0);
 
   // Iterate over each factor.
-  for (auto& factor : factors_) {
+  for (auto &factor : factors_) {
     if (auto hf = std::dynamic_pointer_cast<HybridFactor>(factor)) {
       // Add errorTree for hybrid factors, includes HybridGaussianConditionals!
       result = result + hf->errorTree(continuousValues);
     } else if (auto df = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
       // If discrete, just add its errorTree as well
       result = result + df->errorTree();
-    } else if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
+    } else if (auto gf = dynamic_pointer_cast<GaussianFactor>(factor)) {
       // For a continuous only factor, just add its error
       result = result + gf->error(continuousValues);
     } else {
@@ -495,7 +513,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
 }
 
 /* ************************************************************************ */
-double HybridGaussianFactorGraph::probPrime(const HybridValues& values) const {
+double HybridGaussianFactorGraph::probPrime(const HybridValues &values) const {
   double error = this->error(values);
   // NOTE: The 0.5 term is handled by each factor
   return std::exp(-error);
@@ -503,7 +521,7 @@ double HybridGaussianFactorGraph::probPrime(const HybridValues& values) const {
 
 /* ************************************************************************ */
 AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::discretePosterior(
-    const VectorValues& continuousValues) const {
+    const VectorValues &continuousValues) const {
   AlgebraicDecisionTree<Key> errors = this->errorTree(continuousValues);
   AlgebraicDecisionTree<Key> p = errors.apply([](double error) {
     // NOTE: The 0.5 term is handled by each factor
@@ -513,18 +531,19 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::discretePosterior(
 }
 
 /* ************************************************************************ */
-GaussianFactorGraph HybridGaussianFactorGraph::choose(const DiscreteValues& assignment) const {
+GaussianFactorGraph HybridGaussianFactorGraph::choose(
+    const DiscreteValues &assignment) const {
   GaussianFactorGraph gfg;
-  for (auto&& f : *this) {
+  for (auto &&f : *this) {
     if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(f)) {
       gfg.push_back(gf);
     } else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
       gfg.push_back(gf);
     } else if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
       gfg.push_back((*hgf)(assignment).first);
-    } else if (auto hgc = std::dynamic_pointer_cast<HybridGaussianConditional>(f)) {
+    } else if (auto hgc = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
       gfg.push_back((*hgc)(assignment));
-    } else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(f)) {
+    } else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
       if (auto gc = hc->asGaussian())
         gfg.push_back(gc);
       else if (auto hgc = hc->asHybrid())
diff --git a/gtsam/hybrid/HybridGaussianProductFactor.cpp b/gtsam/hybrid/HybridGaussianProductFactor.cpp
index 2430750d1d..2e95ea8d11 100644
--- a/gtsam/hybrid/HybridGaussianProductFactor.cpp
+++ b/gtsam/hybrid/HybridGaussianProductFactor.cpp
@@ -59,7 +59,8 @@ HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=(
   return *this;
 }
 
-void HybridGaussianProductFactor::print(const std::string& s, const KeyFormatter& formatter) const {
+void HybridGaussianProductFactor::print(const std::string& s,
+                                        const KeyFormatter& formatter) const {
   KeySet keys;
   auto printer = [&](const Y& y) {
     if (keys.empty()) keys = y.first.keys();
@@ -77,8 +78,10 @@ void HybridGaussianProductFactor::print(const std::string& s, const KeyFormatter
 
 HybridGaussianProductFactor HybridGaussianProductFactor::removeEmpty() const {
   auto emptyGaussian = [](const Y& y) {
-    bool hasNull = std::any_of(
-        y.first.begin(), y.first.end(), [](const GaussianFactor::shared_ptr& ptr) { return !ptr; });
+    bool hasNull =
+        std::any_of(y.first.begin(),
+                    y.first.end(),
+                    [](const GaussianFactor::shared_ptr& ptr) { return !ptr; });
     return hasNull ? Y{GaussianFactorGraph(), 0.0} : y;
   };
   return {Base(*this, emptyGaussian)};
 }
diff --git a/gtsam/hybrid/HybridGaussianProductFactor.h b/gtsam/hybrid/HybridGaussianProductFactor.h
index 6d33daa1ba..9c2aee74ab 100644
--- a/gtsam/hybrid/HybridGaussianProductFactor.h
+++ b/gtsam/hybrid/HybridGaussianProductFactor.h
@@ -60,13 +60,16 @@ class HybridGaussianProductFactor
   ///@{
 
   /// Add GaussianFactor into HybridGaussianProductFactor
-  HybridGaussianProductFactor operator+(const GaussianFactor::shared_ptr& factor) const;
+  HybridGaussianProductFactor operator+(
+      const GaussianFactor::shared_ptr& factor) const;
 
   /// Add HybridGaussianFactor into HybridGaussianProductFactor
-  HybridGaussianProductFactor operator+(const HybridGaussianFactor& factor) const;
+  HybridGaussianProductFactor operator+(
+      const HybridGaussianFactor& factor) const;
 
   /// Add-assign operator for GaussianFactor
-  HybridGaussianProductFactor& operator+=(const GaussianFactor::shared_ptr& factor);
+  HybridGaussianProductFactor& operator+=(
+      const GaussianFactor::shared_ptr& factor);
 
   /// Add-assign operator for HybridGaussianFactor
   HybridGaussianProductFactor& operator+=(const HybridGaussianFactor& factor);
@@ -81,7 +84,8 @@ class HybridGaussianProductFactor
    * @param s Optional string to prepend
    * @param formatter Optional key formatter
    */
-  void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const;
+  void print(const std::string& s = "",
+             const KeyFormatter& formatter = DefaultKeyFormatter) const;
 
   /**
    * @brief Check if this HybridGaussianProductFactor is equal to another
    * @param tol Tolerance for floating point comparisons
   * @return true if equal, false otherwise
    */
-  bool equals(const HybridGaussianProductFactor& other, double tol = 1e-9) const {
+  bool equals(const HybridGaussianProductFactor& other,
+              double tol = 1e-9) const {
     return Base::equals(other, [tol](const Y& a, const Y& b) {
-      return a.first.equals(b.first, tol) && std::abs(a.second - b.second) < tol;
+      return a.first.equals(b.first, tol) &&
+             std::abs(a.second - b.second) < tol;
     });
   }
 
@@ -102,12 +108,13 @@ class HybridGaussianProductFactor
 
   /**
    * @brief Remove empty GaussianFactorGraphs from the decision tree
-   * @return A new HybridGaussianProductFactor with empty GaussianFactorGraphs removed
+   * @return A new HybridGaussianProductFactor with empty GaussianFactorGraphs
+   * removed
    *
    * If any GaussianFactorGraph in the decision tree contains a nullptr, convert
-   * that leaf to an empty GaussianFactorGraph with zero scalar sum. This is needed because the
-   * DecisionTree will otherwise create a GaussianFactorGraph with a single (null) factor, which
-   * doesn't register as null.
+   * that leaf to an empty GaussianFactorGraph with zero scalar sum. This is
+   * needed because the DecisionTree will otherwise create a GaussianFactorGraph
+   * with a single (null) factor, which doesn't register as null.
    */
   HybridGaussianProductFactor removeEmpty() const;
 
@@ -116,6 +123,7 @@ class HybridGaussianProductFactor
 
 // Testable traits
 template <>
-struct traits<HybridGaussianProductFactor> : public Testable<HybridGaussianProductFactor> {};
+struct traits<HybridGaussianProductFactor>
+    : public Testable<HybridGaussianProductFactor> {};
 
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp
index 56b75d15ee..2f5031cf28 100644
--- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp
+++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp
@@ -181,19 +181,19 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
 
 /* ************************************************************************* */
 AlgebraicDecisionTree<Key> HybridNonlinearFactorGraph::errorTree(
-    const Values& continuousValues) const {
+    const Values& values) const {
   AlgebraicDecisionTree<Key> result(0.0);
 
   // Iterate over each factor.
   for (auto& factor : factors_) {
     if (auto hnf = std::dynamic_pointer_cast<HybridNonlinearFactor>(factor)) {
       // Compute factor error and add it.
-      result = result + hnf->errorTree(continuousValues);
+      result = result + hnf->errorTree(values);
     } else if (auto nf = std::dynamic_pointer_cast<NonlinearFactor>(factor)) {
       // If continuous only, get the (double) error
       // and add it to every leaf of the result
-      result = result + nf->error(continuousValues);
+      result = result + nf->error(values);
     } else if (auto df = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
       // If discrete, just add its errorTree as well
diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h
index dd18cfa601..f79f7b4524 100644
--- a/gtsam/hybrid/HybridNonlinearFactorGraph.h
+++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h
@@ -98,10 +98,10 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
    *
    * @note: Gaussian and hybrid Gaussian factors are not considered!
    *
-   * @param continuousValues Manifold values at which to compute the error.
+   * @param values Manifold values at which to compute the error.
    * @return AlgebraicDecisionTree<Key>
    */
-  AlgebraicDecisionTree<Key> errorTree(const Values& continuousValues) const;
+  AlgebraicDecisionTree<Key> errorTree(const Values& values) const;
 
   /**
    * @brief Computer posterior P(M|X=x) when all continuous values X are given.
diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp
index 6bd9a22a87..14bef5fbb4 100644
--- a/gtsam/hybrid/tests/testGaussianMixture.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixture.cpp
@@ -40,7 +40,8 @@ const DiscreteKey m(M(0), 2);
 const DiscreteValues m1Assignment{{M(0), 1}};
 
 // Define a 50/50 prior on the mode
-DiscreteConditional::shared_ptr mixing = std::make_shared<DiscreteConditional>(m, "60/40");
+DiscreteConditional::shared_ptr mixing =
+    std::make_shared<DiscreteConditional>(m, "60/40");
 
 /// Gaussian density function
 double Gaussian(double mu, double sigma, double z) {
@@ -52,7 +53,8 @@ double Gaussian(double mu, double sigma, double z) {
  * If sigma0 == sigma1, it simplifies to a sigmoid function.
 * Hardcodes 60/40 prior on mode.
 */
-double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, double z) {
+double prob_m_z(double mu0, double mu1, double sigma0, double sigma1,
+                double z) {
   const double p0 = 0.6 * Gaussian(mu0, sigma0, z);
   const double p1 = 0.4 * Gaussian(mu1, sigma1, z);
   return p1 / (p0 + p1);
@@ -68,13 +70,15 @@ TEST(GaussianMixture, GaussianMixtureModel) {
 
   // Create a Gaussian mixture model p(z|m) with same sigma.
   HybridBayesNet gmm;
-  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma}, {Vector1(mu1), sigma}};
+  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma},
+                                                    {Vector1(mu1), sigma}};
   gmm.emplace_shared<HybridGaussianConditional>(m, Z(0), parameters);
   gmm.push_back(mixing);
 
   // At the halfway point between the means, we should get P(m|z)=0.5
   double midway = mu1 - mu0;
-  auto eliminationResult = gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential();
+  auto eliminationResult =
+      gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential();
   auto pMid = *eliminationResult->at(0)->asDiscrete();
   EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid));
 
@@ -84,7 +88,8 @@ TEST(GaussianMixture, GaussianMixtureModel) {
     const double expected = prob_m_z(mu0, mu1, sigma, sigma, z);
 
     // Workflow 1: convert HBN to HFG and solve
-    auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
+    auto eliminationResult1 =
+        gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
     auto posterior1 = *eliminationResult1->at(0)->asDiscrete();
     EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);
 
@@ -109,7 +114,8 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
 
   // Create a Gaussian mixture model p(z|m) with same sigma.
   HybridBayesNet gmm;
-  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}};
+  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma0},
+                                                    {Vector1(mu1), sigma1}};
   gmm.emplace_shared<HybridGaussianConditional>(m, Z(0), parameters);
   gmm.push_back(mixing);
 
@@ -119,15 +125,18 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
   const VectorValues vv{{Z(0), Vector1(zMax)}};
   auto gfg = gmm.toFactorGraph(vv);
 
-  // Equality of posteriors asserts that the elimination is correct (same ratios for all modes)
+  // Equality of posteriors asserts that the elimination is correct (same ratios
+  // for all modes)
   const auto& expectedDiscretePosterior = gmm.discretePosterior(vv);
   EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
 
   // Eliminate the graph!
   auto eliminationResultMax = gfg.eliminateSequential();
 
-  // Equality of posteriors asserts that the elimination is correct (same ratios for all modes)
-  EXPECT(assert_equal(expectedDiscretePosterior, eliminationResultMax->discretePosterior(vv)));
+  // Equality of posteriors asserts that the elimination is correct (same ratios
+  // for all modes)
+  EXPECT(assert_equal(expectedDiscretePosterior,
+                      eliminationResultMax->discretePosterior(vv)));
 
   auto pMax = *eliminationResultMax->at(0)->asDiscrete();
   EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4));
@@ -138,7 +147,8 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
     const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z);
 
     // Workflow 1: convert HBN to HFG and solve
-    auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
+    auto eliminationResult1 =
+        gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
     auto posterior1 = *eliminationResult1->at(0)->asDiscrete();
     EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);
diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp
index 438bfd2670..d4192cb712 100644
--- a/gtsam/hybrid/tests/testHybridBayesNet.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -168,8 +168,10 @@ TEST(HybridBayesNet, Tiny) {
   EXPECT(!pruned.equals(bayesNet));
 
   // error
-  const double error0 = chosen0.error(vv) + gc0->negLogConstant() - px->negLogConstant() - log(0.4);
-  const double error1 = chosen1.error(vv) + gc1->negLogConstant() - px->negLogConstant() - log(0.6);
+  const double error0 = chosen0.error(vv) + gc0->negLogConstant() -
+                        px->negLogConstant() - log(0.4);
+  const double error1 = chosen1.error(vv) + gc1->negLogConstant() -
+                        px->negLogConstant() - log(0.6);
 
   // print errors:
   EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9);
   EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9);
@@ -190,7 +192,6 @@ TEST(HybridBayesNet, Tiny) {
   // toFactorGraph
   auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}});
   EXPECT_LONGS_EQUAL(3, fg.size());
-  GTSAM_PRINT(fg);
 
   // Create the product factor for eliminating x0:
   HybridGaussianFactorGraph factors_x0;
@@ -204,9 +205,7 @@ TEST(HybridBayesNet, Tiny) {
 
   // Call eliminate and check scalar:
   auto result = factors_x0.eliminate({X(0)});
-  GTSAM_PRINT(*result.first);
   auto df = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
-  GTSAM_PRINT(df->errorTree());
 
   // Check that the ratio of probPrime to evaluate is the same for all modes.
   std::vector<double> ratio(2);
@@ -228,13 +227,17 @@ TEST(HybridBayesNet, Tiny) {
 
 /* ****************************************************************************/
 // Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
 namespace different_sigmas {
-const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
+const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1),
+                                                         Vector1(-4.0), 5.0);
 
-const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0}, {Vector1(2), 3.0}};
+const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0},
+                                                   {Vector1(2), 3.0}};
 const auto hgc = std::make_shared<HybridGaussianConditional>(Asia, X(1), parms);
 const auto prior = std::make_shared<DiscreteConditional>(Asia, "99/1");
 
-auto wrap = [](const auto& c) { return std::make_shared<HybridConditional>(c); };
+auto wrap = [](const auto& c) {
+  return std::make_shared<HybridConditional>(c);
+};
 const HybridBayesNet bayesNet{wrap(gc), wrap(hgc), wrap(prior)};
 
 // Create values at which to evaluate.
@@ -248,8 +251,8 @@ TEST(HybridBayesNet, evaluateHybrid) {
   const double conditionalProbability = gc->evaluate(values.continuous());
   const double mixtureProbability = hgc->evaluate(values);
-  EXPECT_DOUBLES_EQUAL(
-      conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), 1e-9);
+  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
+                       bayesNet.evaluate(values), 1e-9);
 }
 
 /* ****************************************************************************/
 TEST(HybridBayesNet, Choose) {
 
   EXPECT_LONGS_EQUAL(4, gbn.size());
 
-  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), *gbn.at(0)));
-  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), *gbn.at(1)));
-  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), *gbn.at(2)));
-  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), *gbn.at(3)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment),
+                      *gbn.at(0)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment),
+                      *gbn.at(1)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment),
+                      *gbn.at(2)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment),
+                      *gbn.at(3)));
 }
 
 /* ****************************************************************************/
@@ -311,7 +318,8 @@ TEST(HybridBayesNet, OptimizeAssignment) {
 TEST(HybridBayesNet, Optimize) {
   Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");
 
-  HybridBayesNet::shared_ptr hybridBayesNet = s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr hybridBayesNet =
+      s.linearizedFactorGraph.eliminateSequential();
 
   HybridValues delta = hybridBayesNet->optimize();
 
@@ -338,7 +346,8 @@ TEST(HybridBayesNet, Pruning) {
   //  ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
   Switching s(3);
 
-  HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr posterior =
+      s.linearizedFactorGraph.eliminateSequential();
   EXPECT_LONGS_EQUAL(5, posterior->size());
 
   // Optimize
@@ -364,9 +373,12 @@ TEST(HybridBayesNet, Pruning) {
   logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues);
   logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues);
   logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues);
-  logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues);
-  logProbability += posterior->at(4)->asDiscrete()->logProbability(hybridValues);
-  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues), 1e-9);
+  logProbability +=
+      posterior->at(3)->asDiscrete()->logProbability(hybridValues);
+  logProbability +=
+      posterior->at(4)->asDiscrete()->logProbability(hybridValues);
+  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues),
+                       1e-9);
 
   // Check agreement with discrete posterior
   // double density = exp(logProbability);
@@ -387,7 +399,8 @@ TEST(HybridBayesNet, Pruning) {
 TEST(HybridBayesNet, Prune) {
   Switching s(4);
 
-  HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr posterior =
+      s.linearizedFactorGraph.eliminateSequential();
   EXPECT_LONGS_EQUAL(7, posterior->size());
 
   HybridValues delta = posterior->optimize();
@@ -404,7 +417,8 @@ TEST(HybridBayesNet, Prune) {
 TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   Switching s(4);
 
-  HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr posterior =
+      s.linearizedFactorGraph.eliminateSequential();
   EXPECT_LONGS_EQUAL(7, posterior->size());
 
   DiscreteConditional joint;
@@ -416,7 +430,8 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   auto prunedDecisionTree = joint.prune(maxNrLeaves);
 
 #ifdef GTSAM_DT_MERGING
-  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, prunedDecisionTree.nrLeaves());
+  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
+                     prunedDecisionTree.nrLeaves());
 #else
   EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree.nrLeaves());
 #endif
 
   // regression
   // NOTE(Frank): I had to include *three* non-zeroes here now.
   DecisionTreeFactor::ADT potentials(
-      s.modes, std::vector<double>{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381});
+      s.modes,
+      std::vector<double>{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381});
   DiscreteConditional expectedConditional(3, s.modes, potentials);
 
   // Prune!
   auto pruned = posterior->prune(maxNrLeaves);
 
   // Functor to verify values against the expectedConditional
-  auto checker = [&](const Assignment<Key>& assignment, double probability) -> double {
+  auto checker = [&](const Assignment<Key>& assignment,
+                     double probability) -> double {
     // typecast so we can use this to get probability value
     DiscreteValues choices(assignment);
     if (prunedDecisionTree(choices) == 0) {
@@ -446,7 +463,8 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   CHECK(pruned.at(0)->asDiscrete());
   auto pruned_discrete_conditionals = pruned.at(0)->asDiscrete();
   auto discrete_conditional_tree =
-      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(pruned_discrete_conditionals);
+      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
+          pruned_discrete_conditionals);
 
   // The checker functor verifies the values for us.
   discrete_conditional_tree->apply(checker);
 }
 
 /* ****************************************************************************/
 TEST(HybridBayesNet, Sampling) {
   HybridNonlinearFactorGraph nfg;
 
   auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
   nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
 
-  auto zero_motion = std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
-  auto one_motion = std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
+  auto zero_motion =
+      std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
+  auto one_motion =
+      std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
   nfg.emplace_shared<HybridNonlinearFactor>(
-      DiscreteKey(M(0), 2), std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});
+      DiscreteKey(M(0), 2),
+      std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});
 
   DiscreteKey mode(M(0), 2);
   nfg.emplace_shared<DiscreteDistribution>(mode, "1/1");
@@ -535,17 +556,18 @@ TEST(HybridBayesNet, ErrorTreeWithConditional) {
   hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
 
   // Add measurement P(z0 | x0)
-  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
+  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
+                                          measurement_model);
 
   // Add hybrid motion model
   double mu = 0.0;
   double sigma0 = 1e2, sigma1 = 1e-2;
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
   auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
-  auto c0 =
-      make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model0),
-  c1 =
-      make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1);
+  auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
+                                             x0, -I_1x1, model0),
+       c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
+                                             x0, -I_1x1, model1);
 
   DiscreteKey m1(M(2), 2);
   hbn.emplace_shared<HybridGaussianConditional>(m1, std::vector{c0, c1});
diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp
index f31846cb3a..319c6d52b0 100644
--- a/gtsam/hybrid/tests/testHybridBayesTree.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp
@@ -59,7 +59,7 @@ std::vector<GaussianFactor::shared_ptr> components(Key key) {
   return {std::make_shared<JacobianFactor>(key, I_3x3, Z_3x1),
           std::make_shared<JacobianFactor>(key, I_3x3, Vector3::Ones())};
 }
-} // namespace two
+}  // namespace two
 
 /* ************************************************************************* */
 TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
@@ -142,9 +142,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
 }
 
 /* ************************************************************************* */
-void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg,
-              const HybridBayesTree::shared_ptr &hbt,
-              const Ordering &ordering) {
+void dotPrint(const HybridGaussianFactorGraph::shared_ptr& hfg,
+              const HybridBayesTree::shared_ptr& hbt,
+              const Ordering& ordering) {
   DotWriter dw;
   dw.positionHints['c'] = 2;
   dw.positionHints['x'] = 1;
@@ -179,13 +179,15 @@ TEST(HybridGaussianFactorGraph, Switching) {
     std::vector<int> naturalX(N);
     std::iota(naturalX.begin(), naturalX.end(), 1);
     std::vector<Key> ordX;
-    std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
-                   [](int x) { return X(x); });
+    std::transform(
+        naturalX.begin(), naturalX.end(), std::back_inserter(ordX), [](int x) {
+          return X(x);
+        });
 
     auto [ndX, lvls] = makeBinaryOrdering(ordX);
     std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
     // TODO(dellaert): this has no effect!
- for (auto &l : lvls) { + for (auto& l : lvls) { l = -l; } } @@ -193,8 +195,10 @@ TEST(HybridGaussianFactorGraph, Switching) { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; - std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return M(x); }); + std::transform( + naturalC.begin(), naturalC.end(), std::back_inserter(ordC), [](int x) { + return M(x); + }); // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); const auto [ndC, lvls] = makeBinaryOrdering(ordC); @@ -233,13 +237,15 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { std::vector naturalX(N); std::iota(naturalX.begin(), naturalX.end(), 1); std::vector ordX; - std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), - [](int x) { return X(x); }); + std::transform( + naturalX.begin(), naturalX.end(), std::back_inserter(ordX), [](int x) { + return X(x); + }); auto [ndX, lvls] = makeBinaryOrdering(ordX); std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); // TODO(dellaert): this has no effect! - for (auto &l : lvls) { + for (auto& l : lvls) { l = -l; } } @@ -247,8 +253,10 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; - std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return M(x); }); + std::transform( + naturalC.begin(), naturalC.end(), std::back_inserter(ordC), [](int x) { + return M(x); + }); // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); const auto [ndC, lvls] = makeBinaryOrdering(ordC); @@ -408,8 +416,7 @@ TEST(HybridBayesTree, OptimizeAssignment) { // Create ordering. Ordering ordering; - for (size_t k = 0; k < s.K; k++) - ordering.push_back(X(k)); + for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k)); const auto [hybridBayesNet, remainingFactorGraph] = s.linearizedFactorGraph.eliminatePartialSequential(ordering); @@ -451,21 +458,20 @@ TEST(HybridBayesTree, Optimize) { // Create ordering. 
  Ordering ordering;
-  for (size_t k = 0; k < s.K; k++)
-    ordering.push_back(X(k));
+  for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k));
 
   const auto [hybridBayesNet, remainingFactorGraph] =
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);
 
   DiscreteFactorGraph dfg;
-  for (auto &&f : *remainingFactorGraph) {
+  for (auto&& f : *remainingFactorGraph) {
     auto discreteFactor = dynamic_pointer_cast<DiscreteFactor>(f);
     assert(discreteFactor);
     dfg.push_back(discreteFactor);
   }
 
   // Add the probabilities for each branch
-  DiscreteKeys discrete_keys = {m0, m1, m2};
+  DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}};
   vector<double> probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656,
                          0.037152205, 0.12248971,  0.07349729,  0.08};
   dfg.emplace_shared<DecisionTreeFactor>(discrete_keys, probs);
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 80812a3d8b..6206e5f459 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -37,8 +37,6 @@
 // Include for test suite
 #include <CppUnitLite/TestHarness.h>
 
-#include
-
 #include "Switching.h"
 
 using namespace std;
diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
index 5631ac3219..e29c485afd 100644
--- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
@@ -51,8 +51,10 @@ namespace equal_constants {
 // Create a simple HybridGaussianConditional
 const double commonSigma = 2.0;
 const std::vector<GaussianConditional::shared_ptr> conditionals{
-    GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma),
-    GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)};
+    GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
+                                             commonSigma),
+    GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
+                                             commonSigma)};
 const HybridGaussianConditional hybrid_conditional(mode, conditionals);
 }  // namespace equal_constants
 
@@ -77,8 +79,8 @@ TEST(HybridGaussianConditional, LogProbability) {
   using namespace equal_constants;
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
-    EXPECT_DOUBLES_EQUAL(
-        conditionals[mode]->logProbability(vv), hybrid_conditional.logProbability(hv), 1e-8);
+    EXPECT_DOUBLES_EQUAL(conditionals[mode]->logProbability(vv),
+                         hybrid_conditional.logProbability(hv), 1e-8);
   }
 }
 
@@ -90,7 +92,8 @@ TEST(HybridGaussianConditional, Error) {
   // Check result.
   DiscreteKeys discrete_keys{mode};
-  std::vector<double> leaves = {conditionals[0]->error(vv), conditionals[1]->error(vv)};
+  std::vector<double> leaves = {conditionals[0]->error(vv),
+                                conditionals[1]->error(vv)};
   AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
 
   EXPECT(assert_equal(expected, actual, 1e-6));
@@ -98,7 +101,8 @@ TEST(HybridGaussianConditional, Error) {
   // Check for non-tree version.
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
-    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), hybrid_conditional.error(hv), 1e-8);
+    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv),
+                         hybrid_conditional.error(hv), 1e-8);
   }
 }
 
@@ -113,8 +117,10 @@ TEST(HybridGaussianConditional, Likelihood) {
   // Check that the hybrid conditional error and the likelihood error are the
   // same.
- EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check that likelihood error is as expected, i.e., just the errors of the // individual likelihoods, in the `equal_constants` case. @@ -128,7 +134,8 @@ TEST(HybridGaussianConditional, Likelihood) { std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -138,8 +145,10 @@ namespace mode_dependent_constants { // Create a HybridGaussianConditional with mode-dependent noise models. // 0 is low-noise, 1 is high-noise. const std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 0.5), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), + 0.5), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), + 3.0)}; const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace mode_dependent_constants @@ -171,8 +180,9 @@ TEST(HybridGaussianConditional, Error2) { // Expected error is e(X) + log(sqrt(|2πΣ|)). // We normalize log(sqrt(|2πΣ|)) with min(negLogConstant) // so it is non-negative. - std::vector leaves = {conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, - conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; + std::vector leaves = { + conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, + conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -180,10 +190,10 @@ TEST(HybridGaussianConditional, Error2) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL( - conditionals[mode]->error(vv) + conditionals[mode]->negLogConstant() - minErrorConstant, - hybrid_conditional.error(hv), - 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) + + conditionals[mode]->negLogConstant() - + minErrorConstant, + hybrid_conditional.error(hv), 1e-8); } } @@ -198,8 +208,10 @@ TEST(HybridGaussianConditional, Likelihood2) { // Check that the hybrid conditional error and the likelihood error are as // expected, this invariant is the same as the equal noise case: - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check the detailed JacobianFactor calculation for mode==1. 
{ @@ -208,18 +220,20 @@ TEST(HybridGaussianConditional, Likelihood2) { const auto jf1 = std::dynamic_pointer_cast(gf1); CHECK(jf1); - // Check that the JacobianFactor error with constants is equal to the conditional error: - EXPECT_DOUBLES_EQUAL( - hybrid_conditional.error(hv1), - jf1->error(hv1) + conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant(), - 1e-8); + // Check that the JacobianFactor error with constants is equal to the + // conditional error: + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), + jf1->error(hv1) + conditionals[1]->negLogConstant() - + hybrid_conditional.negLogConstant(), + 1e-8); } // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -232,7 +246,8 @@ TEST(HybridGaussianConditional, Prune) { DiscreteKeys modes{{M(1), 2}, {M(2), 2}}; std::vector gcs; for (size_t i = 0; i < 4; i++) { - gcs.push_back(GaussianConditional::sharedMeanAndStddev(Z(0), Vector1(i + 1), i + 1)); + gcs.push_back( + GaussianConditional::sharedMeanAndStddev(Z(0), Vector1(i + 1), i + 1)); } auto empty = std::make_shared(); HybridGaussianConditional::Conditionals conditionals(modes, gcs); @@ -252,14 +267,8 @@ TEST(HybridGaussianConditional, Prune) { } } { - const std::vector potentials{0, - 0, - 0.5, - 0, // - 0, - 0, - 0.5, - 0}; + const std::vector potentials{0, 0, 0.5, 0, // + 0, 0, 0.5, 0}; const DecisionTreeFactor decisionTreeFactor(keys, potentials); const auto pruned = hgc.prune(decisionTreeFactor); @@ -268,14 +277,8 @@ TEST(HybridGaussianConditional, Prune) { EXPECT_LONGS_EQUAL(2, pruned->nrComponents()); } { - const std::vector potentials{0.2, - 0, - 0.3, - 0, // - 0, - 0, - 0.5, - 0}; + const std::vector potentials{0.2, 0, 0.3, 0, // + 0, 0, 0.5, 0}; const DecisionTreeFactor decisionTreeFactor(keys, potentials); const auto pruned = hgc.prune(decisionTreeFactor); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 3b5a6a80c5..9fdc1aaea5 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -46,7 +46,6 @@ #include "Switching.h" #include "TinyHybridExample.h" -#include "gtsam/linear/GaussianFactorGraph.h" using namespace std; using namespace gtsam; @@ -74,7 +73,8 @@ TEST(HybridGaussianFactorGraph, Creation) { HybridGaussianConditional gm( m0, {std::make_shared(X(0), Z_3x1, I_3x3, X(1), I_3x3), - std::make_shared(X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)}); + std::make_shared( + X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)}); hfg.add(gm); EXPECT_LONGS_EQUAL(2, hfg.size()); @@ -118,7 +118,8 @@ TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) { auto factor = std::dynamic_pointer_cast(result.second); CHECK(factor); // regression test - EXPECT(assert_equal(DecisionTreeFactor{m1, "15.74961 15.74961"}, *factor, 1e-5)); + EXPECT( + assert_equal(DecisionTreeFactor{m1, "15.74961 15.74961"}, *factor, 1e-5)); } /* ************************************************************************* */ @@ -180,7 +181,8 @@ TEST(HybridBayesNet, Switching) { EXPECT_LONGS_EQUAL(4, graph.size()); // Create some continuous and discrete values - const VectorValues continuousValues{{X(0), Vector1(0.1)}, 
{X(1), Vector1(1.2)}}; + const VectorValues continuousValues{{X(0), Vector1(0.1)}, + {X(1), Vector1(1.2)}}; const DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}}; // Get the hybrid gaussian factor and check it is as expected @@ -213,7 +215,8 @@ TEST(HybridBayesNet, Switching) { // Check errorTree AlgebraicDecisionTree actualErrors = graph.errorTree(continuousValues); // Create expected error tree - const AlgebraicDecisionTree expectedErrors(M(0), expectedError0, expectedError1); + const AlgebraicDecisionTree expectedErrors( + M(0), expectedError0, expectedError1); // Check that the actual error tree matches the expected one EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5)); @@ -226,9 +229,11 @@ TEST(HybridBayesNet, Switching) { EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5); // Check discretePosterior - const AlgebraicDecisionTree graphPosterior = graph.discretePosterior(continuousValues); + const AlgebraicDecisionTree graphPosterior = + graph.discretePosterior(continuousValues); const double sum = probPrime0 + probPrime1; - const AlgebraicDecisionTree expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum); + const AlgebraicDecisionTree expectedPosterior( + M(0), probPrime0 / sum, probPrime1 / sum); EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5)); // Make the clique of factors connected to x0: @@ -267,29 +272,37 @@ TEST(HybridBayesNet, Switching) { EXPECT(std::dynamic_pointer_cast(factor)); auto phi_x1_m = std::dynamic_pointer_cast(factor); EXPECT_LONGS_EQUAL(2, phi_x1_m->keys().size()); // x1, m0 - // Check that the scalars incorporate the negative log constant of the conditional - EXPECT_DOUBLES_EQUAL( - scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(), (*phi_x1_m)(modeZero).second, 1e-9); - EXPECT_DOUBLES_EQUAL( - scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(), (*phi_x1_m)(modeOne).second, 1e-9); - - // Check that the conditional and remaining factor are consistent for both modes + // Check that the scalars incorporate the negative log constant of the + // conditional + EXPECT_DOUBLES_EQUAL(scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(), + (*phi_x1_m)(modeZero).second, + 1e-9); + EXPECT_DOUBLES_EQUAL(scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(), + (*phi_x1_m)(modeOne).second, + 1e-9); + + // Check that the conditional and remaining factor are consistent for both + // modes for (auto&& mode : {modeZero, modeOne}) { const auto gc = (*p_x0_given_x1_m)(mode); const auto [gf, scalar] = (*phi_x1_m)(mode); - // The error of the original factors should equal the sum of errors of the conditional and - // remaining factor, modulo the normalization constant of the conditional. + // The error of the original factors should equal the sum of errors of the + // conditional and remaining factor, modulo the normalization constant of + // the conditional. 
    double originalError = factors_x0.error({continuousValues, mode});
-    const double actualError =
-        gc->negLogConstant() + gc->error(continuousValues) + gf->error(continuousValues) + scalar;
+    const double actualError = gc->negLogConstant() +
+                               gc->error(continuousValues) +
+                               gf->error(continuousValues) + scalar;
     EXPECT_DOUBLES_EQUAL(originalError, actualError, 1e-9);
   }
 
   // Create a clique for x1
   HybridGaussianFactorGraph factors_x1;
-  factors_x1.push_back(factor);  // Use the remaining factor from previous elimination
-  factors_x1.push_back(graph.at(2));  // Add the factor for x1 from the original graph
+  factors_x1.push_back(
+      factor);  // Use the remaining factor from previous elimination
+  factors_x1.push_back(
+      graph.at(2));  // Add the factor for x1 from the original graph
 
   // Test collectProductFactor for x1 clique
   auto productFactor_x1 = factors_x1.collectProductFactor();
@@ -323,16 +336,18 @@
   auto phi_x1 = std::dynamic_pointer_cast<DecisionTreeFactor>(factor_x1);
   CHECK(phi_x1);
   EXPECT_LONGS_EQUAL(1, phi_x1->keys().size());  // m0
-  // We can't really check the error of the decision tree factor phi_x1, because the continuos
-  // factor whose error(kEmpty) we need is not available..
+  // We can't really check the error of the decision tree factor phi_x1, because
+  // the continuous factor whose error(kEmpty) we need is not available.
 
-  // However, we can still check the total error for the clique factors_x1 and the elimination
-  // results are equal, modulo -again- the negative log constant of the conditional.
+  // However, we can still check that the total error for the clique factors_x1
+  // and the elimination results are equal, modulo -again- the negative log
+  // constant of the conditional.
   for (auto&& mode : {modeZero, modeOne}) {
     auto gc_x1 = (*p_x1_given_m)(mode);
     double originalError_x1 = factors_x1.error({continuousValues, mode});
-    const double actualError =
-        gc_x1->negLogConstant() + gc_x1->error(continuousValues) + phi_x1->error(mode);
+    const double actualError = gc_x1->negLogConstant() +
+                               gc_x1->error(continuousValues) +
+                               phi_x1->error(mode);
     EXPECT_DOUBLES_EQUAL(originalError_x1, actualError, 1e-9);
   }
 
@@ -340,9 +355,11 @@
   auto hybridBayesNet = graph.eliminateSequential();
   CHECK(hybridBayesNet);
 
-  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the
-  // same posterior from the graph. This is a sanity check that the elimination is done correctly.
-  AlgebraicDecisionTree<Key> bnPosterior = hybridBayesNet->discretePosterior(continuousValues);
+  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the
+  // same as the posterior from the graph. This is a sanity check that the
+  // elimination is done correctly.
+  AlgebraicDecisionTree<Key> bnPosterior =
+      hybridBayesNet->discretePosterior(continuousValues);
   EXPECT(assert_equal(graphPosterior, bnPosterior));
 }
 
@@ -367,9 +384,11 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
   // Check that the probability prime is the exponential of the error
   EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7));
 
-  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the
-  // same posterior from the graph. This is a sanity check that the elimination is done correctly.
-  const AlgebraicDecisionTree<Key> graphPosterior = graph.discretePosterior(delta.continuous());
+  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the
+  // same as the posterior from the graph.
This is a sanity check that the + // elimination is done correctly. + const AlgebraicDecisionTree graphPosterior = + graph.discretePosterior(delta.continuous()); const AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(delta.continuous()); EXPECT(assert_equal(graphPosterior, bnPosterior)); @@ -491,7 +510,8 @@ TEST(HybridGaussianFactorGraph, Conditionals) { expected_continuous.insert(X(1), 1); expected_continuous.insert(X(2), 2); expected_continuous.insert(X(3), 4); - Values result_continuous = switching.linearizationPoint.retract(result.continuous()); + Values result_continuous = + switching.linearizationPoint.retract(result.continuous()); EXPECT(assert_equal(expected_continuous, result_continuous)); DiscreteValues expected_discrete; @@ -519,8 +539,10 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { // Check discrete posterior at optimum HybridValues delta = hybridBayesNet->optimize(); - AlgebraicDecisionTree graphPosterior = graph.discretePosterior(delta.continuous()); - AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(delta.continuous()); + AlgebraicDecisionTree graphPosterior = + graph.discretePosterior(delta.continuous()); + AlgebraicDecisionTree bnPosterior = + hybridBayesNet->discretePosterior(delta.continuous()); EXPECT(assert_equal(graphPosterior, bnPosterior)); graph = HybridGaussianFactorGraph(); @@ -579,11 +601,9 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { /* ****************************************************************************/ // Check that the factor graph unnormalized probability is proportional to the // Bayes net probability for the given measurements. -bool ratioTest(const HybridBayesNet& bn, - const VectorValues& measurements, - const HybridGaussianFactorGraph& fg, - size_t num_samples = 100) { - auto compute_ratio = [&](HybridValues* sample) -> double { +bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, + const HybridGaussianFactorGraph &fg, size_t num_samples = 100) { + auto compute_ratio = [&](HybridValues *sample) -> double { sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / fg.probPrime(*sample); }; @@ -602,11 +622,9 @@ bool ratioTest(const HybridBayesNet& bn, /* ****************************************************************************/ // Check that the bayes net unnormalized probability is proportional to the // Bayes net probability for the given measurements. -bool ratioTest(const HybridBayesNet& bn, - const VectorValues& measurements, - const HybridBayesNet& posterior, - size_t num_samples = 100) { - auto compute_ratio = [&](HybridValues* sample) -> double { +bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, + const HybridBayesNet &posterior, size_t num_samples = 100) { + auto compute_ratio = [&](HybridValues *sample) -> double { sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / posterior.evaluate(*sample); }; @@ -640,10 +658,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { // Create hybrid Gaussian factor on X(0). 
using tiny::mode; // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = - std::make_shared(X(0), Vector1(14.1421), I_1x1 * 2.82843), - conditional1 = - std::make_shared(X(0), Vector1(10.1379), I_1x1 * 2.02759); + const auto conditional0 = std::make_shared( + X(0), Vector1(14.1421), I_1x1 * 2.82843), + conditional1 = std::make_shared( + X(0), Vector1(10.1379), I_1x1 * 2.02759); expectedBayesNet.emplace_shared( mode, std::vector{conditional0, conditional1}); @@ -671,7 +689,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { bn.emplace_shared(m1, Z(0), I_1x1, X(0), parms); // Create prior on X(0). - bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); + bn.push_back( + GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); // Add prior on m1. bn.emplace_shared(m1, "1/1"); @@ -689,10 +708,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { // Create hybrid Gaussian factor on X(0). // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = - std::make_shared(X(0), Vector1(10.1379), I_1x1 * 2.02759), - conditional1 = - std::make_shared(X(0), Vector1(14.1421), I_1x1 * 2.82843); + const auto conditional0 = std::make_shared( + X(0), Vector1(10.1379), I_1x1 * 2.02759), + conditional1 = std::make_shared( + X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( m1, std::vector{conditional0, conditional1}); @@ -725,10 +744,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { // Create hybrid Gaussian factor on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = - std::make_shared(X(0), Vector1(17.3205), I_1x1 * 3.4641), - conditional1 = - std::make_shared(X(0), Vector1(10.274), I_1x1 * 2.0548); + const auto conditional0 = std::make_shared( + X(0), Vector1(17.3205), I_1x1 * 3.4641), + conditional1 = std::make_shared( + X(0), Vector1(10.274), I_1x1 * 2.0548); expectedBayesNet.emplace_shared( mode, std::vector{conditional0, conditional1}); @@ -772,25 +791,27 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // NOTE: we add reverse topological so we can sample from the Bayes net.: // Add measurements: - std::vector> measurementModels{{Z_1x1, 3}, {Z_1x1, 0.5}}; + std::vector> measurementModels{{Z_1x1, 3}, + {Z_1x1, 0.5}}; for (size_t t : {0, 1, 2}) { // Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - bn.emplace_shared( - noise_mode_t, Z(t), I_1x1, X(t), measurementModels); + bn.emplace_shared(noise_mode_t, Z(t), I_1x1, + X(t), measurementModels); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); } // Add motion models. TODO(frank): why are they exactly the same? 
- std::vector> motionModels{{Z_1x1, 0.2}, {Z_1x1, 0.2}}; + std::vector> motionModels{{Z_1x1, 0.2}, + {Z_1x1, 0.2}}; for (size_t t : {2, 1}) { // Create hybrid Gaussian factor on X(t) conditioned on X(t-1) // and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - bn.emplace_shared( - motion_model_t, X(t), I_1x1, X(t - 1), motionModels); + bn.emplace_shared(motion_model_t, X(t), I_1x1, + X(t - 1), motionModels); // Create prior on motion model M(t): bn.emplace_shared(motion_model_t, "40/60"); @@ -803,7 +824,8 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { EXPECT_LONGS_EQUAL(6, bn.sample().continuous().size()); // Create measurements consistent with moving right every time: - const VectorValues measurements{{Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; + const VectorValues measurements{ + {Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements); // Factor graph is: diff --git a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp index 017df14a55..f41c5f0aa2 100644 --- a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp @@ -54,12 +54,15 @@ const auto f22 = std::make_shared(X(1), A1, X(3), A3, b); const HybridGaussianFactor hybridFactorB(m2, {{f20, 20}, {f21, 21}, {f22, 22}}); // Simulate a pruned hybrid factor, in this case m2==1 is nulled out. -const HybridGaussianFactor prunedFactorB(m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}}); +const HybridGaussianFactor prunedFactorB( + m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}}); } // namespace examples /* ************************************************************************* */ // Constructor -TEST(HybridGaussianProductFactor, Construct) { HybridGaussianProductFactor product; } +TEST(HybridGaussianProductFactor, Construct) { + HybridGaussianProductFactor product; +} /* ************************************************************************* */ // Add two Gaussian factors and check only one leaf in tree diff --git a/gtsam/hybrid/tests/testHybridMotionModel.cpp b/gtsam/hybrid/tests/testHybridMotionModel.cpp index bcffbe6282..16a2bd476d 100644 --- a/gtsam/hybrid/tests/testHybridMotionModel.cpp +++ b/gtsam/hybrid/tests/testHybridMotionModel.cpp @@ -46,25 +46,25 @@ using symbol_shorthand::Z; DiscreteKey m1(M(1), 2); -void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { +void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); - hbn.emplace_shared( - z_key, Vector1(0.0), I_1x1, x_key, -I_1x1, measurement_model); + hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, + -I_1x1, measurement_model); } /// Create hybrid motion model p(x1 | x0, m1) -static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(double mu0, - double mu1, - double sigma0, - double sigma1) { +static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( + double mu0, double mu1, double sigma0, double sigma1) { std::vector> motionModels{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; - return std::make_shared(m1, X(1), I_1x1, X(0), motionModels); + return std::make_shared(m1, X(1), I_1x1, X(0), + motionModels); } /// Create two state Bayes network with 1 or two measurement models -HybridBayesNet CreateBayesNet(const HybridGaussianConditional::shared_ptr& hybridMotionModel, - bool add_second_measurement 
= false) { +HybridBayesNet CreateBayesNet( + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + bool add_second_measurement = false) { HybridBayesNet hbn; // Add measurement model p(z0 | x0) @@ -86,16 +86,15 @@ HybridBayesNet CreateBayesNet(const HybridGaussianConditional::shared_ptr& hybri /// Approximate the discrete marginal P(m1) using importance sampling std::pair approximateDiscreteMarginal( - const HybridBayesNet& hbn, - const HybridGaussianConditional::shared_ptr& hybridMotionModel, - const VectorValues& given, - size_t N = 100000) { + const HybridBayesNet &hbn, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + const VectorValues &given, size_t N = 100000) { /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), /// using q(x0) = N(z0, sigmaQ) to sample x0. HybridBayesNet q; q.push_back(hybridMotionModel); // Add hybrid motion model q.emplace_shared(GaussianConditional::FromMeanAndStddev( - X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 + X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 q.emplace_shared(m1, "50/50"); // Discrete prior. // Do importance sampling @@ -195,16 +194,20 @@ TEST(HybridGaussianFactor, TwoStateModel2) { HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential(); - for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN const auto& expectedDiscretePosterior = hbn.discretePosterior(vv); - // Equality of posteriors asserts that the factor graph is correct (same ratios for all modes) - EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + // Equality of posteriors asserts that the factor graph is correct (same + // ratios for all modes) + EXPECT( + assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); // This one asserts that HBN resulting from elimination is correct. - EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv))); + EXPECT(assert_equal(expectedDiscretePosterior, + eliminated->discretePosterior(vv))); } // Importance sampling run with 100k samples gives 50.095/49.905 @@ -227,16 +230,20 @@ TEST(HybridGaussianFactor, TwoStateModel2) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. - for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN const auto& expectedDiscretePosterior = hbn.discretePosterior(vv); - // Equality of posteriors asserts that the factor graph is correct (same ratios for all modes) - EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + // Equality of posteriors asserts that the factor graph is correct (same + // ratios for all modes) + EXPECT( + assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); // This one asserts that HBN resulting from elimination is correct. 
- EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv))); + EXPECT(assert_equal(expectedDiscretePosterior, + eliminated->discretePosterior(vv))); } // Values taken from an importance sampling run with 100k samples: @@ -290,11 +297,13 @@ TEST(HybridGaussianFactor, TwoStateModel3) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. - for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); @@ -318,11 +327,13 @@ TEST(HybridGaussianFactor, TwoStateModel3) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. - for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 5f939e5893..b5486c6cdb 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -34,7 +34,6 @@ #include #include "Switching.h" -#include "Test.h" // Include for test suite #include