
Merge pull request #1839 from borglab/improved-api-3
varunagrawal authored Sep 23, 2024
2 parents 788b074 + 9b3176e commit fd7df61
Showing 27 changed files with 177 additions and 175 deletions.
6 changes: 6 additions & 0 deletions gtsam/discrete/DiscreteConditional.cpp
@@ -465,6 +465,12 @@ string DiscreteConditional::html(const KeyFormatter& keyFormatter,
double DiscreteConditional::evaluate(const HybridValues& x) const {
return this->evaluate(x.discrete());
}

/* ************************************************************************* */
double DiscreteConditional::negLogConstant() const {
return 0.0;
}

/* ************************************************************************* */

} // namespace gtsam
9 changes: 5 additions & 4 deletions gtsam/discrete/DiscreteConditional.h
@@ -264,11 +264,12 @@ class GTSAM_EXPORT DiscreteConditional
}

/**
* logNormalizationConstant K is just zero, such that
* logProbability(x) = log(evaluate(x)) = - error(x)
* and hence error(x) = - log(evaluate(x)) > 0 for all x.
* negLogConstant is just zero, such that
* -logProbability(x) = -log(evaluate(x)) = error(x),
* and hence error(x) >= 0 for all x.
* Thus -log(k) for the normalization constant k is 0.
*/
double logNormalizationConstant() const override { return 0.0; }
double negLogConstant() const override;

/// @}

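A minimal sketch of that identity, assuming the standard string-spec DiscreteConditional constructor used elsewhere in gtsam (the key and table here are illustrative):

#include <gtsam/discrete/DiscreteConditional.h>
#include <cassert>
#include <cmath>

int main() {
  using namespace gtsam;
  // Binary prior P(m): P(m=0) = 0.4, P(m=1) = 0.6.
  DiscreteKey m(0, 2);
  DiscreteConditional prior(m, "4/6");

  DiscreteValues x;
  x[m.first] = 1;

  // negLogConstant() is zero, so error(x) == -log(evaluate(x)) >= 0.
  assert(prior.negLogConstant() == 0.0);
  assert(std::abs(prior.error(x) + std::log(prior.evaluate(x))) < 1e-9);
  return 0;
}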
2 changes: 1 addition & 1 deletion gtsam/discrete/discrete.i
@@ -112,7 +112,7 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor {
const std::vector<double>& table);

// Standard interface
double logNormalizationConstant() const;
double negLogConstant() const;
double logProbability(const gtsam::DiscreteValues& values) const;
double evaluate(const gtsam::DiscreteValues& values) const;
double error(const gtsam::DiscreteValues& values) const;
10 changes: 5 additions & 5 deletions gtsam/hybrid/HybridConditional.cpp
@@ -161,18 +161,18 @@ double HybridConditional::logProbability(const HybridValues &values) const {
}

/* ************************************************************************ */
double HybridConditional::logNormalizationConstant() const {
double HybridConditional::negLogConstant() const {
if (auto gc = asGaussian()) {
return gc->logNormalizationConstant();
return gc->negLogConstant();
}
if (auto gm = asHybrid()) {
return gm->logNormalizationConstant(); // 0.0!
return gm->negLogConstant(); // 0.0!
}
if (auto dc = asDiscrete()) {
return dc->logNormalizationConstant(); // 0.0!
return dc->negLogConstant(); // 0.0!
}
throw std::runtime_error(
"HybridConditional::logProbability: conditional type not handled");
"HybridConditional::negLogConstant: conditional type not handled");
}

/* ************************************************************************ */
5 changes: 3 additions & 2 deletions gtsam/hybrid/HybridConditional.h
@@ -193,11 +193,12 @@ class GTSAM_EXPORT HybridConditional
double logProbability(const HybridValues& values) const override;

/**
* Return the log normalization constant.
* Return the negative log of the normalization constant.
* This shows up in logProbability as logProbability(x) = -(error(x) + negLogConstant).
* Note this is 0.0 for discrete and hybrid conditionals, but depends
* on the continuous parameters for Gaussian conditionals.
*/
double logNormalizationConstant() const override;
double negLogConstant() const override;

/// Return the probability (or density) of the underlying conditional.
double evaluate(const HybridValues& values) const override;
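As a sketch of the relation in that doc comment (an illustrative helper, not part of the PR; it assumes only the methods declared above):

#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridValues.h>
#include <cassert>
#include <cmath>

// Check logProbability(x) == -(error(x) + negLogConstant()) for any
// hybrid conditional c and compatible values x.
void checkNegLogConstantRelation(const gtsam::HybridConditional& c,
                                 const gtsam::HybridValues& x) {
  const double expected = -(c.error(x) + c.negLogConstant());
  assert(std::abs(c.logProbability(x) - expected) < 1e-9);
}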
21 changes: 9 additions & 12 deletions gtsam/hybrid/HybridGaussianConditional.cpp
@@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs(
// Check if conditional is pruned
if (conditional) {
// Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|))
value = -conditional->logNormalizationConstant();
value = conditional->negLogConstant();
}
return {std::dynamic_pointer_cast<GaussianFactor>(conditional), value};
};
Expand All @@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional(
discreteParents, GetFactorValuePairs(conditionals)),
BaseConditional(continuousFrontals.size()),
conditionals_(conditionals) {
// Calculate logConstant_ as the minimum of the log normalizers of the
// conditionals, by visiting the decision tree:
logConstant_ = std::numeric_limits<double>::infinity();
// Calculate negLogConstant_ as the minimum of the negative-log normalizers of
// the conditionals, by visiting the decision tree:
negLogConstant_ = std::numeric_limits<double>::infinity();
conditionals_.visit(
[this](const GaussianConditional::shared_ptr &conditional) {
if (conditional) {
this->logConstant_ = std::min(
this->logConstant_, -conditional->logNormalizationConstant());
this->negLogConstant_ =
std::min(this->negLogConstant_, conditional->negLogConstant());
}
});
}
@@ -84,8 +84,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
// First check if conditional has not been pruned
if (gc) {
const double Cgm_Kgcm =
-this->logConstant_ - gc->logNormalizationConstant();
const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_;
// If there is a difference in the covariances, we need to account for
// that since the error is dependent on the mode.
if (Cgm_Kgcm > 0.0) {
@@ -156,8 +155,7 @@ void HybridGaussianConditional::print(const std::string &s,
std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), ";
}
std::cout << std::endl
<< " logNormalizationConstant: " << logNormalizationConstant()
<< std::endl
<< " logNormalizationConstant: " << -negLogConstant() << std::endl
<< std::endl;
conditionals_.print(
"", [&](Key k) { return formatter(k); },
@@ -215,8 +213,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
[&](const GaussianConditional::shared_ptr &conditional)
-> GaussianFactorValuePair {
const auto likelihood_m = conditional->likelihood(given);
const double Cgm_Kgcm =
-logConstant_ - conditional->logNormalizationConstant();
const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_;
if (Cgm_Kgcm == 0.0) {
return {likelihood_m, 0.0};
} else {
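To make the min-normalization concrete, a small sketch (it assumes GaussianConditional::FromMeanAndStddev, as used in the hybrid tests; the sigmas are illustrative):

#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/inference/Symbol.h>
#include <algorithm>
#include <cassert>

int main() {
  using namespace gtsam;
  using symbol_shorthand::Z;
  // Two modes with different sigmas: the wider Gaussian has the larger
  // negLogConstant = log(sqrt(2*pi*sigma^2)).
  auto c0 = GaussianConditional::FromMeanAndStddev(Z(0), Vector1(0.0), 0.5);
  auto c1 = GaussianConditional::FromMeanAndStddev(Z(0), Vector1(0.0), 3.0);

  // negLogConstant_ of the hybrid conditional is the min over modes, so each
  // per-mode offset Cgm_Kgcm = negLogConstant() - negLogConstant_ is >= 0.
  const double negLogConstant_ =
      std::min(c0->negLogConstant(), c1->negLogConstant());
  assert(c0->negLogConstant() - negLogConstant_ >= 0.0);
  assert(c1->negLogConstant() - negLogConstant_ >= 0.0);
  return 0;
}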
14 changes: 10 additions & 4 deletions gtsam/hybrid/HybridGaussianConditional.h
@@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridGaussianConditional
Conditionals conditionals_; ///< a decision tree of Gaussian conditionals.
///< Negative-log of the normalization constant (log(\sqrt(|2πΣ|))).
///< Take advantage of the neg-log space so everything is a minimization
double logConstant_;
double negLogConstant_;

/**
* @brief Convert a HybridGaussianConditional of conditionals into
@@ -150,9 +150,15 @@ class GTSAM_EXPORT HybridGaussianConditional
/// Returns the continuous keys among the parents.
KeyVector continuousParents() const;

/// The log normalization constant is the max of the individual
/// log-normalization constants.
double logNormalizationConstant() const override { return -logConstant_; }
/**
* @brief Return log normalization constant in negative log space.
*
* The negative log constant is the min of the individual conditionals'
* negative log constants, i.e. the max of their log normalization constants.
*
* @return double
*/
inline double negLogConstant() const override { return negLogConstant_; }

/**
* Create a likelihood factor for a hybrid Gaussian conditional,
35 changes: 18 additions & 17 deletions gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -233,18 +233,18 @@ continuousElimination(const HybridGaussianFactorGraph &factors,

/* ************************************************************************ */
/**
* @brief Exponentiate log-values, not necessarily normalized, normalize, and
* return as AlgebraicDecisionTree<Key>.
* @brief Exponentiate (not necessarily normalized) negative log-values,
* normalize, and then return as AlgebraicDecisionTree<Key>.
*
* @param logValues DecisionTree of (unnormalized) negative log values.
* @return AlgebraicDecisionTree<Key>
*/
static AlgebraicDecisionTree<Key> probabilitiesFromLogValues(
static AlgebraicDecisionTree<Key> probabilitiesFromNegativeLogValues(
const AlgebraicDecisionTree<Key> &logValues) {
// Perform normalization
double max_log = logValues.max();
double min_log = logValues.min();
AlgebraicDecisionTree<Key> probabilities = DecisionTree<Key, double>(
logValues, [&max_log](const double x) { return exp(x - max_log); });
logValues, [&min_log](const double x) { return exp(-(x - min_log)); });
probabilities = probabilities.normalize(probabilities.sum());

return probabilities;
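The same shift-and-exponentiate trick in standalone form (a sketch of the idea, not the gtsam implementation):

#include <algorithm>
#include <cmath>
#include <vector>

// Exponentiate negative log-values stably: shifting by the minimum maps the
// best (smallest) value to exp(0) = 1, so nothing overflows; then normalize
// so the result sums to one.
std::vector<double> probabilitiesFromNegLog(const std::vector<double>& negLog) {
  const double minNegLog = *std::min_element(negLog.begin(), negLog.end());
  std::vector<double> p;
  double sum = 0.0;
  for (const double x : negLog) {
    p.push_back(std::exp(-(x - minNegLog)));
    sum += p.back();
  }
  for (double& v : p) v /= sum;
  return p;
}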
@@ -265,13 +265,13 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
auto logProbability =
[&](const GaussianFactor::shared_ptr &factor) -> double {
if (!factor) return 0.0;
return -factor->error(VectorValues());
return factor->error(VectorValues());
};
AlgebraicDecisionTree<Key> logProbabilities =
DecisionTree<Key, double>(gmf->factors(), logProbability);

AlgebraicDecisionTree<Key> probabilities =
probabilitiesFromLogValues(logProbabilities);
probabilitiesFromNegativeLogValues(logProbabilities);
dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(),
probabilities);

@@ -321,23 +321,23 @@ using Result = std::pair<std::shared_ptr<GaussianConditional>,
static std::shared_ptr<Factor> createDiscreteFactor(
const DecisionTree<Key, Result> &eliminationResults,
const DiscreteKeys &discreteSeparator) {
auto logProbability = [&](const Result &pair) -> double {
auto negLogProbability = [&](const Result &pair) -> double {
const auto &[conditional, factor] = pair;
static const VectorValues kEmpty;
// If the factor is not null, it has no keys, just contains the residual.
if (!factor) return 1.0; // TODO(dellaert): not loving this.

// Logspace version of:
// Negative logspace version of:
// exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
// We take negative of the logNormalizationConstant `log(k)`
// to get `log(1/k) = log(\sqrt{|2πΣ|})`.
return -factor->error(kEmpty) - conditional->logNormalizationConstant();
// negLogConstant gives `-log(k)`
// which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`.
return factor->error(kEmpty) - conditional->negLogConstant();
};

AlgebraicDecisionTree<Key> logProbabilities(
DecisionTree<Key, double>(eliminationResults, logProbability));
AlgebraicDecisionTree<Key> negLogProbabilities(
DecisionTree<Key, double>(eliminationResults, negLogProbability));
AlgebraicDecisionTree<Key> probabilities =
probabilitiesFromLogValues(logProbabilities);
probabilitiesFromNegativeLogValues(negLogProbabilities);

return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
}
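A worked step for the return value above: with normalization constant k = 1/sqrt(|2πΣ|), the unnormalized probability is exp(-E)/k, so its negative log is E + log(k) = E - (-log(k)) = E - negLogConstant, matching factor->error(kEmpty) - conditional->negLogConstant().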
@@ -355,8 +355,9 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
if (!hf) throw std::runtime_error("Expected HessianFactor!");
// Add 2.0 term since the constant term will be premultiplied by 0.5
// as per the Hessian definition
hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
// as per the Hessian definition,
// and negative since we want log(k)
hf->constantTerm() += -2.0 * conditional->negLogConstant();
}
return {factor, 0.0};
};
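A worked step for the constant-term update above: per the Hessian definition the comment cites, a HessianFactor evaluates error(x) = 0.5 x'Gx - x'g + 0.5 f with f = constantTerm(), so incrementing f by -2·negLogConstant shifts the error by 0.5·(-2·negLogConstant) = -negLogConstant = log(k), exactly the normalization term being folded in.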
2 changes: 1 addition & 1 deletion gtsam/hybrid/hybrid.i
@@ -61,7 +61,7 @@ virtual class HybridConditional {
size_t nrParents() const;

// Standard interface:
double logNormalizationConstant() const;
double negLogConstant() const;
double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const;
double operator()(const gtsam::HybridValues& values) const;
35 changes: 18 additions & 17 deletions gtsam/hybrid/tests/testHybridGaussianConditional.cpp
@@ -61,11 +61,11 @@ const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode,
TEST(HybridGaussianConditional, Invariants) {
using namespace equal_constants;

// Check that the conditional normalization constant is the max of all
// constants which are all equal, in this case, hence:
const double K = hybrid_conditional.logNormalizationConstant();
EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
// Check that the conditional's (negative log) normalization constant is the
// min of the individual constants; they are all equal in this case, hence:
const double K = hybrid_conditional.negLogConstant();
EXPECT_DOUBLES_EQUAL(K, conditionals[0]->negLogConstant(), 1e-8);
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->negLogConstant(), 1e-8);

EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0));
EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1));
@@ -180,25 +180,26 @@ TEST(HybridGaussianConditional, Error2) {

// Check result.
DiscreteKeys discrete_keys{mode};
double logNormalizer0 = -conditionals[0]->logNormalizationConstant();
double logNormalizer1 = -conditionals[1]->logNormalizationConstant();
double minLogNormalizer = std::min(logNormalizer0, logNormalizer1);
double negLogConstant0 = conditionals[0]->negLogConstant();
double negLogConstant1 = conditionals[1]->negLogConstant();
double minErrorConstant = std::min(negLogConstant0, negLogConstant1);

// Expected error is e(X) + log(|2πΣ|).
// We normalize log(|2πΣ|) with min(logNormalizers) so it is non-negative.
// Expected error is e(X) + log(sqrt(|2πΣ|)).
// We normalize log(sqrt(|2πΣ|)) with min(negLogConstant)
// so it is non-negative.
std::vector<double> leaves = {
conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer,
conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer};
conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant,
conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant};
AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);

EXPECT(assert_equal(expected, actual, 1e-6));

// Check for non-tree version.
for (size_t mode : {0, 1}) {
const HybridValues hv{vv, {{M(0), mode}}};
EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) -
conditionals[mode]->logNormalizationConstant() -
minLogNormalizer,
EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) +
conditionals[mode]->negLogConstant() -
minErrorConstant,
hybrid_conditional.error(hv), 1e-8);
}
}
@@ -230,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
CHECK(jf1->rows() == 2);

// Check that the constant C1 is properly encoded in the JacobianFactor.
const double C1 = hybrid_conditional.logNormalizationConstant() -
conditionals[1]->logNormalizationConstant();
const double C1 =
conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant();
const double c1 = std::sqrt(2.0 * C1);
Vector expected_unwhitened(2);
expected_unwhitened << 4.9 - 5.0, -c1;
5 changes: 2 additions & 3 deletions gtsam/hybrid/tests/testHybridGaussianFactor.cpp
@@ -780,9 +780,8 @@ static HybridGaussianFactorGraph CreateFactorGraph(
// Create HybridGaussianFactor
// The underlying scalar should be log(\sqrt(|2πΣ|)),
// which is exactly what negLogConstant() returns
std::vector<GaussianFactorValuePair> factors{
{f0, -model0->logNormalizationConstant()},
{f1, -model1->logNormalizationConstant()}};
std::vector<GaussianFactorValuePair> factors{{f0, model0->negLogConstant()},
{f1, model1->negLogConstant()}};
HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);

HybridGaussianFactorGraph hfg;
5 changes: 2 additions & 3 deletions gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -902,9 +902,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
// Create HybridNonlinearFactor
// The underlying scalar should be log(\sqrt(|2πΣ|)),
// which is exactly what negLogConstant() returns
std::vector<NonlinearFactorValuePair> factors{
{f0, -model0->logNormalizationConstant()},
{f1, -model1->logNormalizationConstant()}};
std::vector<NonlinearFactorValuePair> factors{{f0, model0->negLogConstant()},
{f1, model1->negLogConstant()}};

HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);

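A sketch of how those scalars order (it assumes the isotropic noise models used in these tests expose negLogConstant(), as the diffs above indicate; the sigmas are illustrative):

#include <gtsam/linear/NoiseModel.h>
#include <cassert>

int main() {
  using namespace gtsam;
  // negLogConstant = log(sqrt(|2*pi*Sigma|)) grows with sigma, so the tighter
  // model contributes the smaller scalar in the factor/value pairs above.
  auto tight = noiseModel::Isotropic::Sigma(1, 0.5);
  auto loose = noiseModel::Isotropic::Sigma(1, 3.0);
  assert(tight->negLogConstant() < loose->negLogConstant());
  return 0;
}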
18 changes: 3 additions & 15 deletions gtsam/inference/Conditional-inst.h
@@ -59,16 +59,8 @@ double Conditional<FACTOR, DERIVEDCONDITIONAL>::evaluate(

/* ************************************************************************* */
template <class FACTOR, class DERIVEDCONDITIONAL>
double Conditional<FACTOR, DERIVEDCONDITIONAL>::logNormalizationConstant()
const {
throw std::runtime_error(
"Conditional::logNormalizationConstant is not implemented");
}

/* ************************************************************************* */
template <class FACTOR, class DERIVEDCONDITIONAL>
double Conditional<FACTOR, DERIVEDCONDITIONAL>::normalizationConstant() const {
return std::exp(logNormalizationConstant());
double Conditional<FACTOR, DERIVEDCONDITIONAL>::negLogConstant() const {
throw std::runtime_error("Conditional::negLogConstant is not implemented");
}

/* ************************************************************************* */
@@ -83,13 +75,9 @@ bool Conditional<FACTOR, DERIVEDCONDITIONAL>::CheckInvariants(
const double logProb = conditional.logProbability(values);
if (std::abs(prob_or_density - std::exp(logProb)) > 1e-9)
return false; // logProb is not consistent with prob_or_density
if (std::abs(conditional.logNormalizationConstant() -
std::log(conditional.normalizationConstant())) > 1e-9)
return false; // log normalization constant is not consistent with
// normalization constant
const double error = conditional.error(values);
if (error < 0.0) return false;  // error must be non-negative.
const double expected = conditional.logNormalizationConstant() - error;
const double expected = -(conditional.negLogConstant() + error);
if (std::abs(logProb - expected) > 1e-9)
return false; // logProb is not consistent with error
return true;