Merge pull request #1805 from borglab/direct-hybrid-fg
varunagrawal authored Sep 20, 2024
2 parents 63c4e33 + 67a8b8f commit 276747c
Showing 11 changed files with 410 additions and 84 deletions.
2 changes: 0 additions & 2 deletions gtsam/discrete/DiscreteBayesNet.cpp
@@ -18,8 +18,6 @@

#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/discrete/DiscreteConditional.h>
- #include <gtsam/discrete/DiscreteFactorGraph.h>
- #include <gtsam/discrete/DiscreteLookupDAG.h>
#include <gtsam/inference/FactorGraph-inst.h>

namespace gtsam {
6 changes: 2 additions & 4 deletions gtsam/hybrid/HybridGaussianConditional.cpp
@@ -222,8 +222,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
} else {
// Add a constant to the likelihood in case the noise models
// are not all equal.
- double c = 2.0 * Cgm_Kgcm;
- return {likelihood_m, c};
+ return {likelihood_m, Cgm_Kgcm};
}
});
return std::make_shared<HybridGaussianFactor>(
@@ -232,8 +231,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(

/* ************************************************************************* */
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
- std::set<DiscreteKey> s;
- s.insert(discreteKeys.begin(), discreteKeys.end());
+ std::set<DiscreteKey> s(discreteKeys.begin(), discreteKeys.end());
return s;
}

25 changes: 14 additions & 11 deletions gtsam/hybrid/HybridGaussianFactor.cpp
@@ -30,8 +30,8 @@ namespace gtsam {

/**
* @brief Helper function to augment the [A|b] matrices in the factor components
- * with the normalizer values.
- * This is done by storing the normalizer value in
+ * with the additional scalar values.
+ * This is done by storing the value in
* the `b` vector as an additional row.
*
* @param factors DecisionTree of GaussianFactors and arbitrary scalars.
@@ -46,31 +46,34 @@ HybridGaussianFactor::Factors augment(
AlgebraicDecisionTree<Key> valueTree;
std::tie(gaussianFactors, valueTree) = unzip(factors);

- // Normalize
+ // Compute minimum value for normalization.
double min_value = valueTree.min();
- AlgebraicDecisionTree<Key> values =
- valueTree.apply([&min_value](double n) { return n - min_value; });

// Finally, update the [A|b] matrices.
- auto update = [&values](const Assignment<Key> &assignment,
- const HybridGaussianFactor::sharedFactor &gf) {
+ auto update = [&min_value](const GaussianFactorValuePair &gfv) {
+ auto [gf, value] = gfv;

auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
if (!jf) return gf;
- // If the log_normalizer is 0, do nothing
- if (values(assignment) == 0.0) return gf;

+ double normalized_value = value - min_value;

+ // If the value is 0, do nothing
+ if (normalized_value == 0.0) return gf;

GaussianFactorGraph gfg;
gfg.push_back(jf);

Vector c(1);
- c << std::sqrt(values(assignment));
+ // When hiding c inside the `b` vector, value == 0.5*c^2
+ c << std::sqrt(2.0 * normalized_value);
auto constantFactor = std::make_shared<JacobianFactor>(c);

gfg.push_back(constantFactor);
return std::dynamic_pointer_cast<GaussianFactor>(
std::make_shared<JacobianFactor>(gfg));
};
- return gaussianFactors.apply(update);
+ return HybridGaussianFactor::Factors(factors, update);
}

/* *******************************************************************************/
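Aside, not part of the commit: the augmentation trick works because appending a scalar row c to the `b` vector adds 0.5*c^2 to the factor error, so choosing c = sqrt(2.0 * normalized_value) contributes exactly the normalized scalar. A minimal standalone check, with illustrative numbers:

#include <cassert>
#include <cmath>

int main() {
  const double value = 0.35, min_value = 0.1;  // illustrative scalars
  const double normalized_value = value - min_value;
  // An extra Jacobian row [ | c] contributes 0.5 * c^2 to the error:
  const double c = std::sqrt(2.0 * normalized_value);
  assert(std::abs(0.5 * c * c - normalized_value) < 1e-12);
  return 0;
}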
9 changes: 9 additions & 0 deletions gtsam/hybrid/HybridGaussianFactor.h
@@ -45,6 +45,15 @@ using GaussianFactorValuePair = std::pair<GaussianFactor::shared_ptr, double>;
* where the set of discrete variables indexes to
* the continuous gaussian distribution.
+ *
+ * In factor graphs the error function typically returns 0.5*|A*x - b|^2, i.e.,
+ * the negative log-likelihood for a Gaussian noise model.
+ * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on
+ * the discrete assignment.
+ * For example, adding a 70/30 mode probability is supported by providing the
+ * scalars $-log(.7)$ and $-log(.3)$.
+ * Note that adding a common constant will not make any difference in the
+ * optimization, so $-log(70)$ and $-log(30)$ work just as well.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
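Aside, not part of the commit: concretely, the 70/30 example maps to one GaussianFactorValuePair per mode. This sketch mirrors the constructor call used in the new tests further down; makeModePrior and its arguments are illustrative stand-ins:

#include <cmath>
#include <vector>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

// Sketch: attach a 70/30 mode prior, given per-mode linearized factors
// f0 and f1, mirroring the tests below.
HybridGaussianFactor makeModePrior(const GaussianFactor::shared_ptr &f0,
                                   const GaussianFactor::shared_ptr &f1) {
  DiscreteKey m1(M(1), 2);
  std::vector<GaussianFactorValuePair> pairs{
      {f0, -std::log(0.7)},   // mode 0: prior probability 0.7
      {f1, -std::log(0.3)}};  // mode 1: prior probability 0.3
  return HybridGaussianFactor({X(0), X(1)}, m1, pairs);
}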
15 changes: 13 additions & 2 deletions gtsam/hybrid/HybridNonlinearFactor.h
@@ -45,6 +45,17 @@ using NonlinearFactorValuePair = std::pair<NonlinearFactor::shared_ptr, double>;
* This class stores all factors as HybridFactors which can then be typecast to
* one of (NonlinearFactor, GaussianFactor) which can then be checked to perform
* the correct operation.
+ *
+ * In factor graphs the error function typically returns 0.5*|h(x)-z|^2, i.e.,
+ * the negative log-likelihood for a Gaussian noise model.
+ * In hybrid factor graphs we allow *adding* an arbitrary scalar dependent on
+ * the discrete assignment.
+ * For example, adding a 70/30 mode probability is supported by providing the
+ * scalars $-log(.7)$ and $-log(.3)$.
+ * Note that adding a common constant will not make any difference in the
+ * optimization, so $-log(70)$ and $-log(30)$ work just as well.
+ *
+ * @ingroup hybrid
*/
class HybridNonlinearFactor : public HybridFactor {
public:
@@ -134,7 +145,7 @@ class HybridNonlinearFactor : public HybridFactor {
auto errorFunc =
[continuousValues](const std::pair<sharedFactor, double>& f) {
auto [factor, val] = f;
- return factor->error(continuousValues) + (0.5 * val);
+ return factor->error(continuousValues) + val;
};
DecisionTree<Key, double> result(factors_, errorFunc);
return result;
@@ -153,7 +164,7 @@ class HybridNonlinearFactor : public HybridFactor {
auto [factor, val] = factors_(discreteValues);
// Compute the error for the selected factor
const double factorError = factor->error(continuousValues);
- return factorError + (0.5 * val);
+ return factorError + val;
}

/**
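Aside, not part of the commit: the remark that a common constant makes no difference can be checked numerically in a standalone snippet; only the differences between per-mode scalars enter the mode comparison:

#include <cmath>
#include <iostream>

int main() {
  // -log(.7)/-log(.3) versus -log(70)/-log(30): the two sets differ by the
  // common constant log(100), so the between-mode difference is identical.
  const double a0 = -std::log(0.7), a1 = -std::log(0.3);
  const double b0 = -std::log(70.0), b1 = -std::log(30.0);
  std::cout << (a1 - a0) << " == " << (b1 - b0) << "\n";  // both ~0.847298
  return 0;
}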
9 changes: 6 additions & 3 deletions gtsam/hybrid/HybridValues.cpp
@@ -36,9 +36,12 @@ HybridValues::HybridValues(const VectorValues& cv, const DiscreteValues& dv,
void HybridValues::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
std::cout << s << ": \n";
- continuous_.print(" Continuous",
- keyFormatter); // print continuous components
- discrete_.print(" Discrete", keyFormatter); // print discrete components
+ // print continuous components
+ continuous_.print(" Continuous", keyFormatter);
+ // print discrete components
+ discrete_.print(" Discrete", keyFormatter);
+ // print nonlinear components
+ nonlinear_.print(" Nonlinear", keyFormatter);
}

/* ************************************************************************* */
175 changes: 170 additions & 5 deletions gtsam/hybrid/tests/testHybridGaussianFactor.cpp
@@ -396,7 +396,7 @@ namespace test_two_state_estimation {

DiscreteKey m1(M(1), 2);

- void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) {
+ void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) {
auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma);
hbn.emplace_shared<GaussianConditional>(z_key, Vector1(0.0), I_1x1, x_key,
-I_1x1, measurement_model);
@@ -420,7 +420,7 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(

/// Create two-state Bayes network with one or two measurement models
HybridBayesNet CreateBayesNet(
- const HybridGaussianConditional::shared_ptr& hybridMotionModel,
+ const HybridGaussianConditional::shared_ptr &hybridMotionModel,
bool add_second_measurement = false) {
HybridBayesNet hbn;

@@ -443,9 +443,9 @@ HybridBayesNet CreateBayesNet(

/// Approximate the discrete marginal P(m1) using importance sampling
std::pair<double, double> approximateDiscreteMarginal(
- const HybridBayesNet& hbn,
- const HybridGaussianConditional::shared_ptr& hybridMotionModel,
- const VectorValues& given, size_t N = 100000) {
+ const HybridBayesNet &hbn,
+ const HybridGaussianConditional::shared_ptr &hybridMotionModel,
+ const VectorValues &given, size_t N = 100000) {
/// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1),
/// using q(x0) = N(z0, sigmaQ) to sample x0.
HybridBayesNet q;
@@ -741,6 +741,171 @@ TEST(HybridGaussianFactor, TwoStateModel4) {
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}

namespace test_direct_factor_graph {
/**
* @brief Create a Factor Graph by directly specifying all
* the factors instead of creating conditionals first.
* This way we can directly provide the likelihoods and
* then perform linearization.
*
* @param values Initial values to linearize around.
* @param means The means of the HybridGaussianFactor components.
* @param sigmas The standard deviations of the HybridGaussianFactor components.
* @param m1 The discrete key.
* @return HybridGaussianFactorGraph
*/
static HybridGaussianFactorGraph CreateFactorGraph(
const gtsam::Values &values, const std::vector<double> &means,
const std::vector<double> &sigmas, DiscreteKey &m1,
double measurement_noise = 1e-3) {
auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise);

auto f0 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0)
->linearize(values);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1)
->linearize(values);

// Create HybridGaussianFactor
// We take negative since we want
// the underlying scalar to be log(\sqrt(|2πΣ|))
std::vector<GaussianFactorValuePair> factors{
{f0, -model0->logNormalizationConstant()},
{f1, -model1->logNormalizationConstant()}};
HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);

HybridGaussianFactorGraph hfg;
hfg.push_back(motionFactor);

hfg.push_back(PriorFactor<double>(X(0), values.at<double>(X(0)), prior_noise)
.linearize(values));

return hfg;
}
} // namespace test_direct_factor_graph
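Aside, not part of the commit: per the comment in CreateFactorGraph above, the scalar attached to each component is -logNormalizationConstant(), i.e. log(\sqrt(|2πΣ|)). For a 1-D isotropic model this is easy to evaluate by hand; a standalone sketch with an illustrative sigma:

#include <cmath>
#include <iostream>

int main() {
  const double kPi = 3.14159265358979323846;
  const double sigma = 1e-2;  // illustrative standard deviation
  // Normalization constant k = 1 / sqrt(2*pi*sigma^2); the scalar is -log(k).
  const double scalar = std::log(std::sqrt(2.0 * kPi * sigma * sigma));
  std::cout << "scalar = " << scalar << "\n";  // ~ -3.68623 for sigma = 0.01
  return 0;
}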

/* ************************************************************************* */
/**
* @brief Test components with differing means but the same covariances.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridGaussianFactor, DifferentMeansFG) {
using namespace test_direct_factor_graph;

DiscreteKey m1(M(1), 2);

Values values;
double x1 = 0.0, x2 = 1.75;
values.insert(X(0), x1);
values.insert(X(1), x2);

std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};

HybridGaussianFactorGraph hfg = CreateFactorGraph(values, means, sigmas, m1);

{
auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();

HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
DiscreteValues{{M(1), 0}});

EXPECT(assert_equal(expected, actual));

DiscreteValues dv0{{M(1), 0}};
VectorValues cont0 = bn->optimize(dv0);
double error0 = bn->error(HybridValues(cont0, dv0));
// regression
EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);

DiscreteValues dv1{{M(1), 1}};
VectorValues cont1 = bn->optimize(dv1);
double error1 = bn->error(HybridValues(cont1, dv1));
EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
}

{
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
hfg.push_back(
PriorFactor<double>(X(1), means[1], prior_noise).linearize(values));

auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();

HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
DiscreteValues{{M(1), 1}});

EXPECT(assert_equal(expected, actual));

{
DiscreteValues dv{{M(1), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
}
}
}

/* ************************************************************************* */
/**
* @brief Test components with differing covariances but the same means.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridGaussianFactor, DifferentCovariancesFG) {
using namespace test_direct_factor_graph;

DiscreteKey m1(M(1), 2);

Values values;
double x1 = 1.0, x2 = 1.0;
values.insert(X(0), x1);
values.insert(X(1), x2);

std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};

// Create FG with HybridGaussianFactor and prior on X1
HybridGaussianFactorGraph fg = CreateFactorGraph(values, means, sigmas, m1);
auto hbn = fg.eliminateSequential();

VectorValues cv;
cv.insert(X(0), Vector1(0.0));
cv.insert(X(1), Vector1(0.0));

// Check the error values at the MLE point μ.
AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);

DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};

// regression
EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);

DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());

EXPECT(assert_equal(expected_m1, actual_m1));
}

/* ************************************************************************* */
int main() {
TestResult tr;