Skip to content

Commit

Permalink
Merge pull request #1867 from borglab/normalize-potentials
Browse files Browse the repository at this point in the history
  • Loading branch information
varunagrawal authored Oct 10, 2024
2 parents def8b25 + 32108ab commit 9a146eb
Show file tree
Hide file tree
Showing 10 changed files with 40 additions and 24 deletions.
20 changes: 16 additions & 4 deletions gtsam/discrete/AlgebraicDecisionTree.h
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,21 @@ namespace gtsam {
this->root_ = DecisionTree<L, double>::convertFrom(other.root_, L_of_M, op);
}

/**
 * @brief Create from an arbitrary DecisionTree<L, X> by operating on it
 * with a functional `f`.
 *
 * @tparam X The type of the leaf of the original DecisionTree
 * @tparam Func Type signature of functional `f` (maps a leaf of type X
 * to the double leaf stored in this tree).
 * @param other The original DecisionTree from which the
 * AlgebraicDecisionTree is constructed.
 * @param f Functional used to operate on
 * the leaves of the input DecisionTree.
 */
template <typename X, typename Func>
AlgebraicDecisionTree(const DecisionTree<L, X>& other, Func f)
: Base(other, f) {}

/** sum */
AlgebraicDecisionTree operator+(const AlgebraicDecisionTree& g) const {
return this->apply(g, &Ring::add);
Expand Down Expand Up @@ -219,12 +234,9 @@ namespace gtsam {
* @brief Helper method to perform normalization such that all leaves in the
* tree sum to 1
*
* @param sum
* @return AlgebraicDecisionTree
*/
AlgebraicDecisionTree normalize(double sum) const {
return this->apply([&sum](const double& x) { return x / sum; });
}
AlgebraicDecisionTree normalize() const {
  // Divide every leaf by the total so the leaves sum to one.
  const double total = this->sum();
  return (*this) / total;
}

/// Find the minimum values amongst all leaves
double min() const {
Expand Down
1 change: 0 additions & 1 deletion gtsam/discrete/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Install headers
set(subdir discrete)
file(GLOB discrete_headers "*.h")
# FIXME: exclude headers
install(FILES ${discrete_headers} DESTINATION include/gtsam/discrete)

# Add all tests
Expand Down
2 changes: 1 addition & 1 deletion gtsam/discrete/tests/testAlgebraicDecisionTree.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -562,7 +562,7 @@ TEST(ADT, Sum) {
TEST(ADT, Normalize) {
ADT a = exampleADT();
double sum = a.sum();
auto actual = a.normalize(sum);
auto actual = a.normalize();

DiscreteKey A(0, 2), B(1, 3), C(2, 2);
DiscreteKeys keys = DiscreteKeys{A, B, C};
Expand Down
1 change: 0 additions & 1 deletion gtsam/hybrid/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Install headers
set(subdir hybrid)
file(GLOB hybrid_headers "*.h")
# FIXME: exclude headers
install(FILES ${hybrid_headers} DESTINATION include/gtsam/hybrid)

# Add all tests
Expand Down
3 changes: 1 addition & 2 deletions gtsam/hybrid/HybridGaussianFactor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -189,8 +189,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
auto errorFunc = [&continuousValues](const GaussianFactorValuePair& pair) {
return PotentiallyPrunedComponentError(pair, continuousValues);
};
DecisionTree<Key, double> error_tree(factors_, errorFunc);
return error_tree;
return {factors_, errorFunc};
}

/* *******************************************************************************/
Expand Down
15 changes: 10 additions & 5 deletions gtsam/hybrid/HybridGaussianFactorGraph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -229,13 +229,18 @@ continuousElimination(const HybridGaussianFactorGraph &factors,
}

/* ************************************************************************ */
/// Take negative log-values, shift them so that the minimum value is 0, and
/// then exponentiate to create a DecisionTreeFactor (not normalized yet!).
/**
* @brief Take negative log-values, shift them so that the minimum value is 0,
* and then exponentiate to create a DecisionTreeFactor (not normalized yet!).
*
* @param errors DecisionTree of (unnormalized) errors.
* @return DecisionTreeFactor::shared_ptr
*/
static DecisionTreeFactor::shared_ptr DiscreteFactorFromErrors(
    const DiscreteKeys &discreteKeys,
    const AlgebraicDecisionTree<Key> &errors) {
  // Shift all errors by the minimum so the best assignment maps to
  // exp(0) = 1; this guards against numerical underflow when exponentiating
  // large negative log-values. The result is NOT normalized.
  double min_log = errors.min();
  // Construct the potentials directly from the error tree via the
  // functional constructor (no intermediate DecisionTree<Key, double>).
  AlgebraicDecisionTree<Key> potentials(
      errors, [&min_log](const double x) { return exp(-(x - min_log)); });
  return std::make_shared<DecisionTreeFactor>(discreteKeys, potentials);
}
Expand All @@ -258,7 +263,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
if (!factor) return std::numeric_limits<double>::infinity();
return scalar + factor->error(kEmpty);
};
DecisionTree<Key, double> errors(gmf->factors(), calculateError);
AlgebraicDecisionTree<Key> errors(gmf->factors(), calculateError);
dfg.push_back(DiscreteFactorFromErrors(gmf->discreteKeys(), errors));

} else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
Expand Down Expand Up @@ -307,7 +312,7 @@ static std::shared_ptr<Factor> createDiscreteFactor(
}
};

DecisionTree<Key, double> errors(eliminationResults, calculateError);
AlgebraicDecisionTree<Key> errors(eliminationResults, calculateError);
return DiscreteFactorFromErrors(discreteSeparator, errors);
}

Expand Down
3 changes: 1 addition & 2 deletions gtsam/hybrid/HybridNonlinearFactor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,7 @@ AlgebraicDecisionTree<Key> HybridNonlinearFactor::errorTree(
auto [factor, val] = f;
return factor->error(continuousValues) + val;
};
DecisionTree<Key, double> result(factors_, errorFunc);
return result;
return {factors_, errorFunc};
}

/* *******************************************************************************/
Expand Down
2 changes: 1 addition & 1 deletion gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -333,7 +333,7 @@ TEST(HybridBayesNet, Switching) {
CHECK(phi_x1);
EXPECT_LONGS_EQUAL(1, phi_x1->keys().size()); // m0
// We can't really check the error of the decision tree factor phi_x1, because
// the continuous factor whose error(kEmpty) we need is not available..
// the continuous factor whose error(kEmpty) we need is not available.

// Now test full elimination of the graph:
auto hybridBayesNet = graph.eliminateSequential();
Expand Down
15 changes: 10 additions & 5 deletions gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,10 @@ TEST(HybridGaussianProductFactor, AsProductFactor) {
EXPECT(actual.first.at(0) == f10);
EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9);

// TODO(Frank): when killed hiding, f11 should also be there
mode[m1.first] = 1;
actual = product(mode);
EXPECT(actual.first.at(0) == f11);
EXPECT_DOUBLES_EQUAL(11, actual.second, 1e-9);
}

/* ************************************************************************* */
Expand All @@ -145,7 +148,10 @@ TEST(HybridGaussianProductFactor, AddOne) {
EXPECT(actual.first.at(0) == f10);
EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9);

// TODO(Frank): when killed hiding, f11 should also be there
mode[m1.first] = 1;
actual = product(mode);
EXPECT(actual.first.at(0) == f11);
EXPECT_DOUBLES_EQUAL(11, actual.second, 1e-9);
}

/* ************************************************************************* */
Expand All @@ -166,9 +172,8 @@ TEST(HybridGaussianProductFactor, AddTwo) {
EXPECT_DOUBLES_EQUAL(10 + 20, actual00.second, 1e-9);

auto actual12 = product({{M(1), 1}, {M(2), 2}});
// TODO(Frank): when killed hiding, these should also equal:
// EXPECT(actual12.first.at(0) == f11);
// EXPECT(actual12.first.at(1) == f22);
EXPECT(actual12.first.at(0) == f11);
EXPECT(actual12.first.at(1) == f22);
EXPECT_DOUBLES_EQUAL(11 + 22, actual12.second, 1e-9);
}

Expand Down
2 changes: 0 additions & 2 deletions gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -973,8 +973,6 @@ TEST(HybridNonlinearFactorGraph, DifferentMeans) {
VectorValues cont0 = bn->optimize(dv0);
double error0 = bn->error(HybridValues(cont0, dv0));

// TODO(Varun) Perform importance sampling to estimate error?

// regression
EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);

Expand Down

0 comments on commit 9a146eb

Please sign in to comment.