From 6a92db62c38c8725b238601b17b00b21b2ca1a92 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 13 Sep 2024 00:24:18 -0400
Subject: [PATCH 1/7] rename GaussianMixtureFactor to HybridGaussianFactor

---
 doc/Hybrid.lyx | 4 +-
 gtsam/hybrid/GaussianMixture.cpp | 10 ++---
 gtsam/hybrid/GaussianMixture.h | 4 +-
 gtsam/hybrid/GaussianMixtureFactor.cpp | 22 +++++-----
 gtsam/hybrid/GaussianMixtureFactor.h | 22 +++++-----
 gtsam/hybrid/HybridFactor.h | 2 +-
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 22 +++++-----
 gtsam/hybrid/HybridGaussianFactorGraph.h | 2 +-
 gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 +--
 gtsam/hybrid/MixtureFactor.h | 8 ++--
 gtsam/hybrid/hybrid.i | 14 +++----
 gtsam/hybrid/tests/Switching.h | 4 +-
 gtsam/hybrid/tests/testGaussianMixture.cpp | 2 +-
 .../tests/testGaussianMixtureFactor.cpp | 40 +++++++++----------
 gtsam/hybrid/tests/testHybridEstimation.cpp | 6 +--
 gtsam/hybrid/tests/testHybridFactorGraph.cpp | 2 +-
 .../tests/testHybridGaussianFactorGraph.cpp | 22 +++++-----
 .../tests/testHybridNonlinearFactorGraph.cpp | 4 +-
 .../hybrid/tests/testSerializationHybrid.cpp | 22 +++++-----
 python/gtsam/tests/test_HybridFactorGraph.py | 6 +--
 20 files changed, 112 insertions(+), 112 deletions(-)

diff --git a/doc/Hybrid.lyx b/doc/Hybrid.lyx
index 44df81e397..b5901d4953 100644
--- a/doc/Hybrid.lyx
+++ b/doc/Hybrid.lyx
@@ -548,13 +548,13 @@ with
 \end_layout

 \begin_layout Subsubsection*
-GaussianMixtureFactor
+HybridGaussianFactor
 \end_layout

 \begin_layout Standard
 Analogously, a
 \emph on
-GaussianMixtureFactor
+HybridGaussianFactor
 \emph default
  typically results from a GaussianMixture by having known values
 \begin_inset Formula $\bar{x}$
diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index ac1956d5f6..feb49b6a23 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -21,7 +21,7 @@
 #include
 #include
 #include
-#include
+#include
 #include
 #include
 #include
@@ -72,7 +72,7 @@ GaussianMixture::GaussianMixture(
 /* *******************************************************************************/
 // TODO(dellaert): This is copy/paste: GaussianMixture should be derived from
-// GaussianMixtureFactor, no?
+// HybridGaussianFactor, no?
 GaussianFactorGraphTree GaussianMixture::add(
     const GaussianFactorGraphTree &sum) const {
   using Y = GaussianFactorGraph;
@@ -203,7 +203,7 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
 }

 /* ************************************************************************* */
-std::shared_ptr GaussianMixture::likelihood(
+std::shared_ptr GaussianMixture::likelihood(
     const VectorValues &given) const {
   if (!allFrontalsGiven(given)) {
     throw std::runtime_error(
@@ -212,7 +212,7 @@ std::shared_ptr GaussianMixture::likelihood(
   const DiscreteKeys discreteParentKeys = discreteKeys();
   const KeyVector continuousParentKeys = continuousParents();

-  const GaussianMixtureFactor::Factors likelihoods(
+  const HybridGaussianFactor::Factors likelihoods(
       conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
         const auto likelihood_m = conditional->likelihood(given);
         const double Cgm_Kgcm =
@@ -231,7 +231,7 @@ std::shared_ptr GaussianMixture::likelihood(
           return std::make_shared(gfg);
         }
       });
-  return std::make_shared(
+  return std::make_shared(
       continuousParentKeys, discreteParentKeys, likelihoods);
 }

diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h
index 714c00272a..10feaf55bf 100644
--- a/gtsam/hybrid/GaussianMixture.h
+++ b/gtsam/hybrid/GaussianMixture.h
@@ -23,7 +23,7 @@
 #include
 #include
 #include
-#include
+#include
 #include
 #include
 #include
@@ -165,7 +165,7 @@ class GTSAM_EXPORT GaussianMixture
    * Create a likelihood factor for a Gaussian mixture, return a Mixture factor
    * on the parents.
    */
-  std::shared_ptr likelihood(
+  std::shared_ptr likelihood(
       const VectorValues &given) const;

   /// Getter for the underlying Conditionals DecisionTree
diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index 94bc094079..4445d875f3 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */

 /**
- * @file GaussianMixtureFactor.cpp
+ * @file HybridGaussianFactor.cpp
  * @brief A set of Gaussian factors indexed by a set of discrete keys.
  * @author Fan Jiang
  * @author Varun Agrawal
@@ -21,7 +21,7 @@
 #include
 #include
 #include
-#include
+#include
 #include
 #include
 #include
@@ -29,13 +29,13 @@
 namespace gtsam {

 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
+HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys,
                                              const DiscreteKeys &discreteKeys,
                                              const Factors &factors)
     : Base(continuousKeys, discreteKeys), factors_(factors) {}

 /* *******************************************************************************/
-bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
+bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
   const This *e = dynamic_cast(&lf);

   if (e == nullptr) return false;
@@ -52,10 +52,10 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
 }

 /* *******************************************************************************/
-void GaussianMixtureFactor::print(const std::string &s,
+void HybridGaussianFactor::print(const std::string &s,
                                   const KeyFormatter &formatter) const {
   std::cout << (s.empty() ?
"" : s + "\n"); - std::cout << "GaussianMixtureFactor" << std::endl; + std::cout << "HybridGaussianFactor" << std::endl; HybridFactor::print("", formatter); std::cout << "{\n"; if (factors_.empty()) { @@ -78,13 +78,13 @@ void GaussianMixtureFactor::print(const std::string &s, } /* *******************************************************************************/ -GaussianMixtureFactor::sharedFactor GaussianMixtureFactor::operator()( +HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( const DiscreteValues &assignment) const { return factors_(assignment); } /* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixtureFactor::add( +GaussianFactorGraphTree HybridGaussianFactor::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; auto add = [](const Y &graph1, const Y &graph2) { @@ -97,14 +97,14 @@ GaussianFactorGraphTree GaussianMixtureFactor::add( } /* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree() +GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() const { auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; }; return {factors_, wrap}; } /* *******************************************************************************/ -AlgebraicDecisionTree GaussianMixtureFactor::errorTree( +AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. auto errorFunc = [&continuousValues](const sharedFactor &gf) { @@ -115,7 +115,7 @@ AlgebraicDecisionTree GaussianMixtureFactor::errorTree( } /* *******************************************************************************/ -double GaussianMixtureFactor::error(const HybridValues &values) const { +double HybridGaussianFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 2459af2596..467412b1d0 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianMixtureFactor.h + * @file HybridGaussianFactor.h * @brief A set of GaussianFactors, indexed by a set of discrete keys. * @author Fan Jiang * @author Varun Agrawal @@ -44,10 +44,10 @@ class VectorValues; * * @ingroup hybrid */ -class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { +class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { public: using Base = HybridFactor; - using This = GaussianMixtureFactor; + using This = HybridGaussianFactor; using shared_ptr = std::shared_ptr; using sharedFactor = std::shared_ptr; @@ -72,7 +72,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { /// @{ /// Default constructor, mainly for serialization. - GaussianMixtureFactor() = default; + HybridGaussianFactor() = default; /** * @brief Construct a new Gaussian mixture factor. @@ -83,22 +83,22 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param factors The decision tree of Gaussian factors stored * as the mixture density. 
*/ - GaussianMixtureFactor(const KeyVector &continuousKeys, + HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors); /** - * @brief Construct a new GaussianMixtureFactor object using a vector of + * @brief Construct a new HybridGaussianFactor object using a vector of * GaussianFactor shared pointers. * * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. */ - GaussianMixtureFactor(const KeyVector &continuousKeys, + HybridGaussianFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const std::vector &factors) - : GaussianMixtureFactor(continuousKeys, discreteKeys, + : HybridGaussianFactor(continuousKeys, discreteKeys, Factors(discreteKeys, factors)) {} /// @} @@ -128,7 +128,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; /** - * @brief Compute error of the GaussianMixtureFactor as a tree. + * @brief Compute error of the HybridGaussianFactor as a tree. * * @param continuousValues The continuous VectorValues. * @return AlgebraicDecisionTree A decision tree with the same keys @@ -148,7 +148,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { /// Add MixtureFactor to a Sum, syntactic sugar. friend GaussianFactorGraphTree &operator+=( - GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) { + GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) { sum = factor.add(sum); return sum; } @@ -168,7 +168,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { // traits template <> -struct traits : public Testable { +struct traits : public Testable { }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index a9c0e53d2c..4a68f851b0 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -46,7 +46,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, * * Examples: * - MixtureFactor - * - GaussianMixtureFactor + * - HybridGaussianFactor * - GaussianMixture * * @ingroup hybrid diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index efcd322aa0..24669863e9 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -24,7 +24,7 @@ #include #include #include -#include +#include #include #include #include @@ -92,7 +92,7 @@ void HybridGaussianFactorGraph::printErrors( // Clear the stringstream ss.str(std::string()); - if (auto gmf = std::dynamic_pointer_cast(factor)) { + if (auto gmf = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -178,7 +178,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { // TODO(dellaert): just use a virtual method defined in HybridFactor. if (auto gf = dynamic_pointer_cast(f)) { result = addGaussian(result, gf); - } else if (auto gmf = dynamic_pointer_cast(f)) { + } else if (auto gmf = dynamic_pointer_cast(f)) { result = gmf->add(result); } else if (auto gm = dynamic_pointer_cast(f)) { result = gm->add(result); @@ -258,8 +258,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors, for (auto &f : factors) { if (auto df = dynamic_pointer_cast(f)) { dfg.push_back(df); - } else if (auto gmf = dynamic_pointer_cast(f)) { - // Case where we have a GaussianMixtureFactor with no continuous keys. 
+ } else if (auto gmf = dynamic_pointer_cast(f)) { + // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. auto logProbability = [&](const GaussianFactor::shared_ptr &factor) -> double { @@ -309,7 +309,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { /* ************************************************************************ */ using Result = std::pair, - GaussianMixtureFactor::sharedFactor>; + HybridGaussianFactor::sharedFactor>; /** * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) @@ -341,7 +341,7 @@ static std::shared_ptr createDiscreteFactor( return std::make_shared(discreteSeparator, probabilities); } -// Create GaussianMixtureFactor on the separator, taking care to correct +// Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. static std::shared_ptr createGaussianMixtureFactor( const DecisionTree &eliminationResults, @@ -362,7 +362,7 @@ static std::shared_ptr createGaussianMixtureFactor( DecisionTree newFactors(eliminationResults, correct); - return std::make_shared(continuousSeparator, + return std::make_shared(continuousSeparator, discreteSeparator, newFactors); } @@ -400,7 +400,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, DecisionTree eliminationResults(factorGraphTree, eliminate); // If there are no more continuous parents we create a DiscreteFactor with the - // error for each discrete choice. Otherwise, create a GaussianMixtureFactor + // error for each discrete choice. Otherwise, create a HybridGaussianFactor // on the separator, taking care to correct for conditional constants. auto newFactor = continuousSeparator.empty() @@ -549,7 +549,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( f = hc->inner(); } - if (auto gaussianMixture = dynamic_pointer_cast(f)) { + if (auto gaussianMixture = dynamic_pointer_cast(f)) { // Compute factor error and add it. 
error_tree = error_tree + gaussianMixture->errorTree(continuousValues); } else if (auto gaussian = dynamic_pointer_cast(f)) { @@ -597,7 +597,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()( gfg.push_back(gf); } else if (auto gc = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); - } else if (auto gmf = std::dynamic_pointer_cast(f)) { + } else if (auto gmf = std::dynamic_pointer_cast(f)) { gfg.push_back((*gmf)(assignment)); } else if (auto gm = dynamic_pointer_cast(f)) { gfg.push_back((*gm)(assignment)); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 8a0a6f0a34..e6ce4467cc 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -18,7 +18,7 @@ #pragma once -#include +#include #include #include #include diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index cdd448412a..923058d05a 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -70,7 +70,7 @@ void HybridNonlinearFactorGraph::printErrors( std::cout << std::endl; } } else if (auto gmf = - std::dynamic_pointer_cast(factor)) { + std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -152,7 +152,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( } // Check if it is a nonlinear mixture factor if (auto mf = dynamic_pointer_cast(f)) { - const GaussianMixtureFactor::shared_ptr& gmf = + const HybridGaussianFactor::shared_ptr& gmf = mf->linearize(continuousValues); linearFG->push_back(gmf); } else if (auto nlf = dynamic_pointer_cast(f)) { @@ -161,7 +161,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( } else if (dynamic_pointer_cast(f)) { // If discrete-only: doesn't need linearization. linearFG->push_back(f); - } else if (auto gmf = dynamic_pointer_cast(f)) { + } else if (auto gmf = dynamic_pointer_cast(f)) { linearFG->push_back(gmf); } else if (auto gm = dynamic_pointer_cast(f)) { linearFG->push_back(gm); diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index 09a641b48a..d2e0e7822d 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -20,7 +20,7 @@ #pragma once #include -#include +#include #include #include #include @@ -233,8 +233,8 @@ class MixtureFactor : public HybridFactor { return factor->linearize(continuousValues); } - /// Linearize all the continuous factors to get a GaussianMixtureFactor. - std::shared_ptr linearize( + /// Linearize all the continuous factors to get a HybridGaussianFactor. 
+ std::shared_ptr linearize( const Values& continuousValues) const { // functional to linearize each factor in the decision tree auto linearizeDT = [continuousValues](const sharedFactor& factor) { @@ -244,7 +244,7 @@ class MixtureFactor : public HybridFactor { DecisionTree linearized_factors( factors_, linearizeDT); - return std::make_shared( + return std::make_shared( continuousKeys_, discreteKeys_, linearized_factors); } diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 85c34e5755..0c72d50467 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -72,14 +72,14 @@ virtual class HybridConditional { double error(const gtsam::HybridValues& values) const; }; -#include -class GaussianMixtureFactor : gtsam::HybridFactor { - GaussianMixtureFactor( +#include +class HybridGaussianFactor : gtsam::HybridFactor { + HybridGaussianFactor( const gtsam::KeyVector& continuousKeys, const gtsam::DiscreteKeys& discreteKeys, const std::vector& factorsList); - void print(string s = "GaussianMixtureFactor\n", + void print(string s = "HybridGaussianFactor\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; @@ -92,7 +92,7 @@ class GaussianMixture : gtsam::HybridFactor { const std::vector& conditionalsList); - gtsam::GaussianMixtureFactor* likelihood( + gtsam::HybridGaussianFactor* likelihood( const gtsam::VectorValues& frontals) const; double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; @@ -177,7 +177,7 @@ class HybridGaussianFactorGraph { void push_back(const gtsam::HybridGaussianFactorGraph& graph); void push_back(const gtsam::HybridBayesNet& bayesNet); void push_back(const gtsam::HybridBayesTree& bayesTree); - void push_back(const gtsam::GaussianMixtureFactor* gmm); + void push_back(const gtsam::HybridGaussianFactor* gmm); void push_back(gtsam::DecisionTreeFactor* factor); void push_back(gtsam::TableFactor* factor); void push_back(gtsam::JacobianFactor* factor); @@ -253,7 +253,7 @@ class MixtureFactor : gtsam::HybridFactor { double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const; - GaussianMixtureFactor* linearize( + HybridGaussianFactor* linearize( const gtsam::Values& continuousValues) const; void print(string s = "MixtureFactor\n", diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 4b2d3f11b6..126b15c6ea 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -19,7 +19,7 @@ #include #include #include -#include +#include #include #include #include @@ -57,7 +57,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( // keyFunc(1) to keyFunc(n+1) for (size_t t = 1; t < n; t++) { - hfg.add(GaussianMixtureFactor( + hfg.add(HybridGaussianFactor( {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}}, {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Z_3x1), diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 4da61912e1..a379dd0362 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include #include diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index fcd9dd08f8..cd20e72153 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -11,7 +11,7 @@ /** * 
@file testGaussianMixtureFactor.cpp - * @brief Unit tests for GaussianMixtureFactor + * @brief Unit tests for HybridGaussianFactor * @author Varun Agrawal * @author Fan Jiang * @author Frank Dellaert @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -43,17 +43,17 @@ using symbol_shorthand::Z; /* ************************************************************************* */ // Check iterators of empty mixture. -TEST(GaussianMixtureFactor, Constructor) { - GaussianMixtureFactor factor; - GaussianMixtureFactor::const_iterator const_it = factor.begin(); +TEST(HybridGaussianFactor, Constructor) { + HybridGaussianFactor factor; + HybridGaussianFactor::const_iterator const_it = factor.begin(); CHECK(const_it == factor.end()); - GaussianMixtureFactor::iterator it = factor.begin(); + HybridGaussianFactor::iterator it = factor.begin(); CHECK(it == factor.end()); } /* ************************************************************************* */ // "Add" two mixture factors together. -TEST(GaussianMixtureFactor, Sum) { +TEST(HybridGaussianFactor, Sum) { DiscreteKey m1(1, 2), m2(2, 3); auto A1 = Matrix::Zero(2, 1); @@ -74,8 +74,8 @@ TEST(GaussianMixtureFactor, Sum) { // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // Design review! - GaussianMixtureFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); - GaussianMixtureFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); + HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); + HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); // Check that number of keys is 3 EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); @@ -99,7 +99,7 @@ TEST(GaussianMixtureFactor, Sum) { } /* ************************************************************************* */ -TEST(GaussianMixtureFactor, Printing) { +TEST(HybridGaussianFactor, Printing) { DiscreteKey m1(1, 2); auto A1 = Matrix::Zero(2, 1); auto A2 = Matrix::Zero(2, 2); @@ -108,10 +108,10 @@ TEST(GaussianMixtureFactor, Printing) { auto f11 = std::make_shared(X(1), A1, X(2), A2, b); std::vector factors{f10, f11}; - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(GaussianMixtureFactor + R"(HybridGaussianFactor Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : @@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{ } /* ************************************************************************* */ -TEST(GaussianMixtureFactor, GaussianMixture) { +TEST(HybridGaussianFactor, GaussianMixture) { KeyVector keys; keys.push_back(X(0)); keys.push_back(X(1)); @@ -161,8 +161,8 @@ TEST(GaussianMixtureFactor, GaussianMixture) { } /* ************************************************************************* */ -// Test the error of the GaussianMixtureFactor -TEST(GaussianMixtureFactor, Error) { +// Test the error of the HybridGaussianFactor +TEST(HybridGaussianFactor, Error) { DiscreteKey m1(1, 2); auto A01 = Matrix2::Identity(); @@ -177,7 +177,7 @@ TEST(GaussianMixtureFactor, Error) { auto f1 = std::make_shared(X(1), A11, X(2), A12, b); std::vector factors{f0, f1}; - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); VectorValues continuousValues; continuousValues.insert(X(1), Vector2(0, 0)); @@ -250,7 +250,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, * The resulting factor 
graph should eliminate to a Bayes net * which represents a sigmoid function. */ -TEST(GaussianMixtureFactor, GaussianMixtureModel) { +TEST(HybridGaussianFactor, GaussianMixtureModel) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -322,7 +322,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { * which represents a Gaussian-like function * where m1>m0 close to 3.1333. */ -TEST(GaussianMixtureFactor, GaussianMixtureModel2) { +TEST(HybridGaussianFactor, GaussianMixtureModel2) { using namespace test_gmm; double mu0 = 1.0, mu1 = 3.0; @@ -446,7 +446,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, * the probability of m1 should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ -TEST(GaussianMixtureFactor, TwoStateModel) { +TEST(HybridGaussianFactor, TwoStateModel) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; @@ -500,7 +500,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { * the P(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ -TEST(GaussianMixtureFactor, TwoStateModel2) { +TEST(HybridGaussianFactor, TwoStateModel2) { using namespace test_two_state_estimation; double mu0 = 1.0, mu1 = 3.0; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index bdc2987627..2ec9c514a5 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -531,10 +531,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) { * Helper function to add the constant term corresponding to * the difference in noise models. */ -std::shared_ptr mixedVarianceFactor( +std::shared_ptr mixedVarianceFactor( const MixtureFactor& mf, const Values& initial, const Key& mode, double noise_tight, double noise_loose, size_t d, size_t tight_index) { - GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial); + HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial); constexpr double log2pi = 1.8378770664093454835606594728112; // logConstant will be of the tighter model @@ -560,7 +560,7 @@ std::shared_ptr mixedVarianceFactor( } }; auto updated_components = gmf->factors().apply(func); - auto updated_gmf = std::make_shared( + auto updated_gmf = std::make_shared( gmf->continuousKeys(), gmf->discreteKeys(), updated_components); return updated_gmf; diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index 33c0761eb4..bc0ef91d77 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -55,7 +55,7 @@ TEST(HybridFactorGraph, Keys) { DecisionTree dt( M(1), std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); KeySet expected_continuous{X(0), X(1)}; EXPECT( diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index a7a315c87d..96d0c478ea 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include #include @@ -129,7 +129,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { DecisionTree dt( M(1), std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); + 
hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt)); auto result = hfg.eliminateSequential(); @@ -155,7 +155,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { std::vector factors = { std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())}; - hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors)); + hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors)); // Discrete probability table for c1 hfg.add(DecisionTreeFactor(m1, {2, 8})); @@ -177,7 +177,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(GaussianMixtureFactor( + hfg.add(HybridGaussianFactor( {X(1)}, {{M(1), 2}}, {std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())})); @@ -212,7 +212,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { std::make_shared(X(1), I_3x3, Vector3::Ones())); // Hybrid factor P(x1|c1) - hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {m}, dt)); // Prior factor on c1 hfg.add(DecisionTreeFactor(m, {2, 8})); @@ -237,7 +237,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - hfg.add(GaussianMixtureFactor( + hfg.add(HybridGaussianFactor( {X(0)}, {{M(0), 2}}, {std::make_shared(X(0), I_3x3, Z_3x1), std::make_shared(X(0), I_3x3, Vector3::Ones())})); @@ -246,7 +246,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { M(1), std::make_shared(X(2), I_3x3, Z_3x1), std::make_shared(X(2), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1)); + hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1)); } hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); @@ -259,13 +259,13 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { M(3), std::make_shared(X(3), I_3x3, Z_3x1), std::make_shared(X(3), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt)); + hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt)); DecisionTree dt1( M(2), std::make_shared(X(5), I_3x3, Z_3x1), std::make_shared(X(5), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1)); + hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1)); } auto ordering_full = @@ -555,7 +555,7 @@ TEST(HybridGaussianFactorGraph, optimize) { C(1), std::make_shared(X(1), I_3x3, Z_3x1), std::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt)); auto result = hfg.eliminateSequential(); @@ -717,7 +717,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Create expected decision tree with two factor graphs: // Get mixture factor: - auto mixture = fg.at(0); + auto mixture = fg.at(0); CHECK(mixture); // Get prior factor: diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 2d851b0fff..67b820c334 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,7 +510,7 @@ factor 0: b = [ -10 ] No noise model factor 1: -GaussianMixtureFactor +HybridGaussianFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -535,7 +535,7 @@ Hybrid [x0 x1; m0]{ } factor 2: -GaussianMixtureFactor +HybridGaussianFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : diff --git 
a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 961618626e..31fe61d9b7 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -19,7 +19,7 @@ #include #include #include -#include +#include #include #include #include @@ -51,12 +51,12 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor"); -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_GaussianMixtureFactor"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors, "gtsam_GaussianMixtureFactor_Factors"); -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, "gtsam_GaussianMixtureFactor_Factors_Leaf"); -BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, "gtsam_GaussianMixtureFactor_Factors_Choice"); BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture"); @@ -72,8 +72,8 @@ BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); /* ****************************************************************************/ -// Test GaussianMixtureFactor serialization. -TEST(HybridSerialization, GaussianMixtureFactor) { +// Test HybridGaussianFactor serialization. +TEST(HybridSerialization, HybridGaussianFactor) { KeyVector continuousKeys{X(0)}; DiscreteKeys discreteKeys{{M(0), 2}}; @@ -84,11 +84,11 @@ TEST(HybridSerialization, GaussianMixtureFactor) { auto f1 = std::make_shared(X(0), A, b1); std::vector factors{f0, f1}; - const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors); + const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors); - EXPECT(equalsObj(factor)); - EXPECT(equalsXML(factor)); - EXPECT(equalsBinary(factor)); + EXPECT(equalsObj(factor)); + EXPECT(equalsXML(factor)); + EXPECT(equalsBinary(factor)); } /* ****************************************************************************/ diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 499afe09f4..02a9b7b7d3 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -18,7 +18,7 @@ import gtsam from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, - GaussianMixture, GaussianMixtureFactor, HybridBayesNet, + GaussianMixture, HybridBayesNet, HybridGaussianFactor, HybridGaussianFactorGraph, HybridValues, JacobianFactor, Ordering, noiseModel) @@ -36,7 +36,7 @@ def test_create(self): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) + gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) @@ -63,7 +63,7 @@ def test_optimize(self): jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) - gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) + gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2]) hfg = HybridGaussianFactorGraph() hfg.push_back(jf1) From 
187935407c27974c0e61e8a6b22eec43d2b622b4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:40:00 -0400 Subject: [PATCH 2/7] rename MixtureFactor to HybridNonlinearFactor --- gtsam/hybrid/GaussianMixture.cpp | 8 ++--- gtsam/hybrid/GaussianMixtureFactor.h | 2 +- gtsam/hybrid/HybridFactor.h | 2 +- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 ++-- gtsam/hybrid/MixtureFactor.h | 28 ++++++++--------- gtsam/hybrid/hybrid.i | 10 +++---- gtsam/hybrid/tests/Switching.h | 4 +-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 10 +++---- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 6 ++-- .../tests/testHybridNonlinearFactorGraph.cpp | 18 +++++------ .../hybrid/tests/testHybridNonlinearISAM.cpp | 6 ++-- gtsam/hybrid/tests/testMixtureFactor.cpp | 30 +++++++++---------- .../tests/test_HybridNonlinearFactorGraph.py | 4 +-- 14 files changed, 68 insertions(+), 68 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index feb49b6a23..f1f1ce4297 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -72,7 +72,7 @@ GaussianMixture::GaussianMixture( /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: GaussianMixture should be derived from -// HybridGaussianFactor, no? +// GaussianMixtureFactor, no? GaussianFactorGraphTree GaussianMixture::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -203,7 +203,7 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { } /* ************************************************************************* */ -std::shared_ptr GaussianMixture::likelihood( +std::shared_ptr GaussianMixture::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( @@ -212,7 +212,7 @@ std::shared_ptr GaussianMixture::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); - const HybridGaussianFactor::Factors likelihoods( + const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = @@ -231,7 +231,7 @@ std::shared_ptr GaussianMixture::likelihood( return std::make_shared(gfg); } }); - return std::make_shared( + return std::make_shared( continuousParentKeys, discreteParentKeys, likelihoods); } diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 467412b1d0..9915e24e67 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -146,7 +146,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Getter for GaussianFactor decision tree const Factors &factors() const { return factors_; } - /// Add MixtureFactor to a Sum, syntactic sugar. + /// Add HybridNonlinearFactor to a Sum, syntactic sugar. 
friend GaussianFactorGraphTree &operator+=( GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) { sum = factor.add(sum); diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 4a68f851b0..2a572ea655 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -45,7 +45,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, * Base class for *truly* hybrid probabilistic factors * * Examples: - * - MixtureFactor + * - HybridNonlinearFactor * - HybridGaussianFactor * - GaussianMixture * diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 923058d05a..35a8ea3552 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include namespace gtsam { @@ -59,7 +59,7 @@ void HybridNonlinearFactorGraph::printErrors( // Clear the stringstream ss.str(std::string()); - if (auto mf = std::dynamic_pointer_cast(factor)) { + if (auto mf = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -151,7 +151,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( continue; } // Check if it is a nonlinear mixture factor - if (auto mf = dynamic_pointer_cast(f)) { + if (auto mf = dynamic_pointer_cast(f)) { const HybridGaussianFactor::shared_ptr& gmf = mf->linearize(continuousValues); linearFG->push_back(gmf); diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index d2e0e7822d..d3d673dda3 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file MixtureFactor.h + * @file HybridNonlinearFactor.h * @brief Nonlinear Mixture factor of continuous and discrete. * @author Kevin Doherty, kdoherty@mit.edu * @author Varun Agrawal @@ -44,11 +44,11 @@ namespace gtsam { * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform * the correct operation. */ -class MixtureFactor : public HybridFactor { +class HybridNonlinearFactor : public HybridFactor { public: using Base = HybridFactor; - using This = MixtureFactor; - using shared_ptr = std::shared_ptr; + using This = HybridNonlinearFactor; + using shared_ptr = std::shared_ptr; using sharedFactor = std::shared_ptr; /** @@ -63,7 +63,7 @@ class MixtureFactor : public HybridFactor { bool normalized_; public: - MixtureFactor() = default; + HybridNonlinearFactor() = default; /** * @brief Construct from Decision tree. @@ -74,7 +74,7 @@ class MixtureFactor : public HybridFactor { * @param normalized Flag indicating if the factor error is already * normalized. */ - MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, const Factors& factors, bool normalized = false) : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} @@ -95,7 +95,7 @@ class MixtureFactor : public HybridFactor { * normalized. 
*/ template - MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, const std::vector>& factors, bool normalized = false) : Base(keys, discreteKeys), normalized_(normalized) { @@ -111,7 +111,7 @@ class MixtureFactor : public HybridFactor { nonlinear_factors.push_back(nf); } else { throw std::runtime_error( - "Factors passed into MixtureFactor need to be nonlinear!"); + "Factors passed into HybridNonlinearFactor need to be nonlinear!"); } } factors_ = Factors(discreteKeys, nonlinear_factors); @@ -124,7 +124,7 @@ class MixtureFactor : public HybridFactor { } /** - * @brief Compute error of the MixtureFactor as a tree. + * @brief Compute error of the HybridNonlinearFactor as a tree. * * @param continuousValues The continuous values for which to compute the * error. @@ -201,15 +201,15 @@ class MixtureFactor : public HybridFactor { /// Check equality bool equals(const HybridFactor& other, double tol = 1e-9) const override { - // We attempt a dynamic cast from HybridFactor to MixtureFactor. If it + // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If it // fails, return false. - if (!dynamic_cast(&other)) return false; + if (!dynamic_cast(&other)) return false; - // If the cast is successful, we'll properly construct a MixtureFactor + // If the cast is successful, we'll properly construct a HybridNonlinearFactor // object from `other` - const MixtureFactor& f(static_cast(other)); + const HybridNonlinearFactor& f(static_cast(other)); - // Ensure that this MixtureFactor and `f` have the same `factors_`. + // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { return traits::Equals(*a, *b, tol); }; diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 0c72d50467..e5be00c6f7 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -235,15 +235,15 @@ class HybridNonlinearFactorGraph { gtsam::DefaultKeyFormatter) const; }; -#include -class MixtureFactor : gtsam::HybridFactor { - MixtureFactor( +#include +class HybridNonlinearFactor : gtsam::HybridFactor { + HybridNonlinearFactor( const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const gtsam::DecisionTree& factors, bool normalized = false); template - MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + HybridNonlinearFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const std::vector& factors, bool normalized = false); @@ -256,7 +256,7 @@ class MixtureFactor : gtsam::HybridFactor { HybridGaussianFactor* linearize( const gtsam::Values& continuousValues) const; - void print(string s = "MixtureFactor\n", + void print(string s = "HybridNonlinearFactor\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 126b15c6ea..0e8680aa91 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include #include @@ -163,7 +163,7 @@ struct Switching { for (auto &&f : motion_models) { components.push_back(std::dynamic_pointer_cast(f)); } - nonlinearFactorGraph.emplace_shared( + nonlinearFactorGraph.emplace_shared( keys, DiscreteKeys{modes[k]}, components); } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp 
b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 3a7e008d80..9b7ba97448 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -389,7 +389,7 @@ TEST(HybridBayesNet, Sampling) { std::make_shared>(X(0), X(1), 1, noise_model); std::vector factors = {zero_motion, one_motion}; nfg.emplace_shared>(X(0), 0.0, noise_model); - nfg.emplace_shared( + nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); DiscreteKey mode(M(0), 2); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 2ec9c514a5..2956339163 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #include #include @@ -435,7 +435,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 0, noise_model); const auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - nfg.emplace_shared( + nfg.emplace_shared( KeyVector{X(0), X(1)}, DiscreteKeys{m}, std::vector{zero_motion, one_motion}); @@ -532,7 +532,7 @@ TEST(HybridEstimation, CorrectnessViaSampling) { * the difference in noise models. */ std::shared_ptr mixedVarianceFactor( - const MixtureFactor& mf, const Values& initial, const Key& mode, + const HybridNonlinearFactor& mf, const Values& initial, const Key& mode, double noise_tight, double noise_loose, size_t d, size_t tight_index) { HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial); @@ -592,7 +592,7 @@ TEST(HybridEstimation, ModeSelection) { std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; - MixtureFactor mf(keys, modes, components); + HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), 0.0); initial.insert(X(1), 0.0); @@ -683,7 +683,7 @@ TEST(HybridEstimation, ModeSelection2) { std::vector components = {model0, model1}; KeyVector keys = {X(0), X(1)}; - MixtureFactor mf(keys, modes, components); + HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), Z_3x1); initial.insert(X(1), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 249cbf9c3a..d2b0fded34 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -418,7 +418,7 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(0), W(1), odometry, noise_model); std::vector components = {moving, still}; - auto mixtureFactor = std::make_shared( + auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); fg.push_back(mixtureFactor); @@ -458,7 +458,7 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); fg.push_back(mixtureFactor); @@ -501,7 +501,7 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); fg.push_back(mixtureFactor); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 67b820c334..9e2816865b 100644 --- 
a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -24,7 +24,7 @@ #include #include #include -#include +#include #include #include #include @@ -105,7 +105,7 @@ TEST(HybridNonlinearFactorGraph, Resize) { auto discreteFactor = std::make_shared(); fg.push_back(discreteFactor); - auto dcFactor = std::make_shared(); + auto dcFactor = std::make_shared(); fg.push_back(dcFactor); EXPECT_LONGS_EQUAL(fg.size(), 3); @@ -132,7 +132,7 @@ TEST(HybridGaussianFactorGraph, Resize) { moving = std::make_shared(X(0), X(1), 1.0, noise_model); std::vector components = {still, moving}; - auto dcFactor = std::make_shared( + auto dcFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); nhfg.push_back(dcFactor); @@ -150,10 +150,10 @@ TEST(HybridGaussianFactorGraph, Resize) { } /*************************************************************************** - * Test that the MixtureFactor reports correctly if the number of continuous + * Test that the HybridNonlinearFactor reports correctly if the number of continuous * keys provided do not match the keys in the factors. */ -TEST(HybridGaussianFactorGraph, MixtureFactor) { +TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { auto nonlinearFactor = std::make_shared>( X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); auto discreteFactor = std::make_shared(); @@ -166,12 +166,12 @@ TEST(HybridGaussianFactorGraph, MixtureFactor) { // Check for exception when number of continuous keys are under-specified. KeyVector contKeys = {X(0)}; - THROWS_EXCEPTION(std::make_shared( + THROWS_EXCEPTION(std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); // Check for exception when number of continuous keys are too many. contKeys = {X(0), X(1), X(2)}; - THROWS_EXCEPTION(std::make_shared( + THROWS_EXCEPTION(std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); } @@ -195,7 +195,7 @@ TEST(HybridFactorGraph, PushBack) { fg = HybridNonlinearFactorGraph(); - auto dcFactor = std::make_shared(); + auto dcFactor = std::make_shared(); fg.push_back(dcFactor); EXPECT_LONGS_EQUAL(fg.size(), 1); @@ -800,7 +800,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { moving = std::make_shared(X(0), X(1), odometry, noise_model); std::vector motion_models = {still, moving}; - fg.emplace_shared( + fg.emplace_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 2fb6fd8baf..e2ed4cbad1 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -437,7 +437,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(0), W(1), odometry, noise_model); std::vector components = {moving, still}; - auto mixtureFactor = std::make_shared( + auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); fg.push_back(mixtureFactor); @@ -477,7 +477,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); fg.push_back(mixtureFactor); @@ -520,7 +520,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; - mixtureFactor = std::make_shared( + mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); fg.push_back(mixtureFactor); diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index a58a4767f0..dead8cd346 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -11,7 +11,7 @@ /** * @file testMixtureFactor.cpp - * @brief Unit tests for MixtureFactor + * @brief Unit tests for HybridNonlinearFactor * @author Varun Agrawal * @date October 2022 */ @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include @@ -36,17 +36,17 @@ using symbol_shorthand::X; /* ************************************************************************* */ // Check iterators of empty mixture. -TEST(MixtureFactor, Constructor) { - MixtureFactor factor; - MixtureFactor::const_iterator const_it = factor.begin(); +TEST(HybridNonlinearFactor, Constructor) { + HybridNonlinearFactor factor; + HybridNonlinearFactor::const_iterator const_it = factor.begin(); CHECK(const_it == factor.end()); - MixtureFactor::iterator it = factor.begin(); + HybridNonlinearFactor::iterator it = factor.begin(); CHECK(it == factor.end()); } /* ************************************************************************* */ // Test .print() output. 
-TEST(MixtureFactor, Printing) { +TEST(HybridNonlinearFactor, Printing) { DiscreteKey m1(1, 2); double between0 = 0.0; double between1 = 1.0; @@ -60,11 +60,11 @@ TEST(MixtureFactor, Printing) { std::make_shared>(X(1), X(2), between1, model); std::vector factors{f0, f1}; - MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = R"(Hybrid [x1 x2; 1] -MixtureFactor +HybridNonlinearFactor Choice(1) 0 Leaf Nonlinear factor on 2 keys 1 Leaf Nonlinear factor on 2 keys @@ -73,7 +73,7 @@ MixtureFactor } /* ************************************************************************* */ -static MixtureFactor getMixtureFactor() { +static HybridNonlinearFactor getMixtureFactor() { DiscreteKey m1(1, 2); double between0 = 0.0; @@ -88,12 +88,12 @@ static MixtureFactor getMixtureFactor() { std::make_shared>(X(1), X(2), between1, model); std::vector factors{f0, f1}; - return MixtureFactor({X(1), X(2)}, {m1}, factors); + return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors); } /* ************************************************************************* */ -// Test the error of the MixtureFactor -TEST(MixtureFactor, Error) { +// Test the error of the HybridNonlinearFactor +TEST(HybridNonlinearFactor, Error) { auto mixtureFactor = getMixtureFactor(); Values continuousValues; @@ -112,8 +112,8 @@ TEST(MixtureFactor, Error) { } /* ************************************************************************* */ -// Test dim of the MixtureFactor -TEST(MixtureFactor, Dim) { +// Test dim of the HybridNonlinearFactor +TEST(HybridNonlinearFactor, Dim) { auto mixtureFactor = getMixtureFactor(); EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index ed26a0c813..3c2a70f734 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -17,9 +17,9 @@ import numpy as np from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase -from gtsam import BetweenFactorPoint3, noiseModel, PriorFactorPoint3, Point3 import gtsam +from gtsam import BetweenFactorPoint3, Point3, PriorFactorPoint3, noiseModel class TestHybridGaussianFactorGraph(GtsamTestCase): @@ -34,7 +34,7 @@ def test_nonlinear_hybrid(self): PriorFactorPoint3(2, Point3(1, 2, 3), noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) nlfg.push_back( - gtsam.MixtureFactor([1], dk, [ + gtsam.HybridNonlinearFactor([1], dk, [ PriorFactorPoint3(1, Point3(0, 0, 0), noiseModel.Unit.Create(3)), PriorFactorPoint3(1, Point3(1, 2, 1), From aef273bce8299a561b5b9829125688116076bfc4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:41:24 -0400 Subject: [PATCH 3/7] rename GaussianMixture to HybridGaussianConditional --- doc/Hybrid.lyx | 12 ++-- gtsam/hybrid/GaussianMixture.cpp | 58 +++++++++---------- gtsam/hybrid/GaussianMixture.h | 32 +++++----- gtsam/hybrid/HybridBayesNet.cpp | 8 +-- gtsam/hybrid/HybridBayesNet.h | 4 +- gtsam/hybrid/HybridConditional.cpp | 2 +- gtsam/hybrid/HybridConditional.h | 18 +++--- gtsam/hybrid/HybridFactor.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 14 ++--- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 +- gtsam/hybrid/HybridSmoother.cpp | 2 +- gtsam/hybrid/HybridSmoother.h | 2 +- gtsam/hybrid/hybrid.i | 12 ++-- gtsam/hybrid/tests/TinyHybridExample.h | 2 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 28 ++++----- 
.../tests/testGaussianMixtureFactor.cpp | 12 ++-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 4 +- gtsam/hybrid/tests/testHybridConditional.cpp | 4 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 4 +- .../tests/testHybridGaussianFactorGraph.cpp | 20 +++---- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 16 ++--- .../tests/testHybridNonlinearFactorGraph.cpp | 2 +- .../hybrid/tests/testHybridNonlinearISAM.cpp | 16 ++--- .../hybrid/tests/testSerializationHybrid.cpp | 22 +++---- gtsam/inference/Conditional.h | 2 +- python/gtsam/tests/test_HybridBayesNet.py | 7 ++- python/gtsam/tests/test_HybridFactorGraph.py | 10 ++-- 27 files changed, 161 insertions(+), 160 deletions(-) diff --git a/doc/Hybrid.lyx b/doc/Hybrid.lyx index b5901d4953..17d7094e10 100644 --- a/doc/Hybrid.lyx +++ b/doc/Hybrid.lyx @@ -191,13 +191,13 @@ E_{gc}(x,y)=\frac{1}{2}\|Rx+Sy-d\|_{\Sigma}^{2}.\label{eq:gc_error} \end_layout \begin_layout Subsubsection* -GaussianMixture +HybridGaussianConditional \end_layout \begin_layout Standard A \emph on -GaussianMixture +HybridGaussianConditional \emph default (maybe to be renamed to \emph on @@ -233,7 +233,7 @@ GaussianConditional to a set of discrete variables. As \emph on -GaussianMixture +HybridGaussianConditional \emph default is a \emph on @@ -324,7 +324,7 @@ The key point here is that \color inherit is the log-normalization constant for the complete \emph on -GaussianMixture +HybridGaussianConditional \emph default across all values of \begin_inset Formula $m$ @@ -556,7 +556,7 @@ Analogously, a \emph on HybridGaussianFactor \emph default - typically results from a GaussianMixture by having known values + typically results from a HybridGaussianConditional by having known values \begin_inset Formula $\bar{x}$ \end_inset @@ -817,7 +817,7 @@ E_{mf}(y,m)=\frac{1}{2}\|A_{m}y-b_{m}\|_{\Sigma_{mfm}}^{2}=E_{gcm}(\bar{x},y)+K_ \end_inset -which is identical to the GaussianMixture error +which is identical to the HybridGaussianConditional error \begin_inset CommandInset ref LatexCommand eqref reference "eq:gm_error" diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index f1f1ce4297..451493ae0e 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianMixture.cpp + * @file HybridGaussianConditional.cpp * @brief A hybrid conditional in the Conditional Linear Gaussian scheme * @author Fan Jiang * @author Varun Agrawal @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include #include @@ -29,10 +29,10 @@ namespace gtsam { -GaussianMixture::GaussianMixture( +HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, - const GaussianMixture::Conditionals &conditionals) + const HybridGaussianConditional::Conditionals &conditionals) : BaseFactor(CollectKeys(continuousFrontals, continuousParents), discreteParents), BaseConditional(continuousFrontals.size()), @@ -50,30 +50,30 @@ GaussianMixture::GaussianMixture( } /* *******************************************************************************/ -const GaussianMixture::Conditionals &GaussianMixture::conditionals() const { +const HybridGaussianConditional::Conditionals &HybridGaussianConditional::conditionals() const { return conditionals_; } /* *******************************************************************************/ 
-GaussianMixture::GaussianMixture( +HybridGaussianConditional::HybridGaussianConditional( KeyVector &&continuousFrontals, KeyVector &&continuousParents, DiscreteKeys &&discreteParents, std::vector &&conditionals) - : GaussianMixture(continuousFrontals, continuousParents, discreteParents, + : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -GaussianMixture::GaussianMixture( +HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals) - : GaussianMixture(continuousFrontals, continuousParents, discreteParents, + : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from +// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be derived from // GaussianMixtureFactor, no? -GaussianFactorGraphTree GaussianMixture::add( +GaussianFactorGraphTree HybridGaussianConditional::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; auto add = [](const Y &graph1, const Y &graph2) { @@ -86,7 +86,7 @@ GaussianFactorGraphTree GaussianMixture::add( } /* *******************************************************************************/ -GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { +GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() const { auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { @@ -109,7 +109,7 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { } /* *******************************************************************************/ -size_t GaussianMixture::nrComponents() const { +size_t HybridGaussianConditional::nrComponents() const { size_t total = 0; conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) { if (node) total += 1; @@ -118,7 +118,7 @@ size_t GaussianMixture::nrComponents() const { } /* *******************************************************************************/ -GaussianConditional::shared_ptr GaussianMixture::operator()( +GaussianConditional::shared_ptr HybridGaussianConditional::operator()( const DiscreteValues &discreteValues) const { auto &ptr = conditionals_(discreteValues); if (!ptr) return nullptr; @@ -127,11 +127,11 @@ GaussianConditional::shared_ptr GaussianMixture::operator()( return conditional; else throw std::logic_error( - "A GaussianMixture unexpectedly contained a non-conditional"); + "A HybridGaussianConditional unexpectedly contained a non-conditional"); } /* *******************************************************************************/ -bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { +bool HybridGaussianConditional::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); if (e == nullptr) return false; @@ -149,7 +149,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { } /* *******************************************************************************/ -void GaussianMixture::print(const std::string &s, +void 
HybridGaussianConditional::print(const std::string &s, const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); if (isContinuous()) std::cout << "Continuous "; @@ -177,7 +177,7 @@ void GaussianMixture::print(const std::string &s, } /* ************************************************************************* */ -KeyVector GaussianMixture::continuousParents() const { +KeyVector HybridGaussianConditional::continuousParents() const { // Get all parent keys: const auto range = parents(); KeyVector continuousParentKeys(range.begin(), range.end()); @@ -193,7 +193,7 @@ KeyVector GaussianMixture::continuousParents() const { } /* ************************************************************************* */ -bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { +bool HybridGaussianConditional::allFrontalsGiven(const VectorValues &given) const { for (auto &&kv : given) { if (given.find(kv.first) == given.end()) { return false; @@ -203,11 +203,11 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { } /* ************************************************************************* */ -std::shared_ptr GaussianMixture::likelihood( +std::shared_ptr HybridGaussianConditional::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( - "GaussianMixture::likelihood: given values are missing some frontals."); + "HybridGaussianConditional::likelihood: given values are missing some frontals."); } const DiscreteKeys discreteParentKeys = discreteKeys(); @@ -252,7 +252,7 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { */ std::function &, const GaussianConditional::shared_ptr &)> -GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) { +HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { // Get the discrete keys as sets for the decision tree // and the gaussian mixture. auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys()); @@ -303,7 +303,7 @@ GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) { } /* *******************************************************************************/ -void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) { +void HybridGaussianConditional::prune(const DecisionTreeFactor &discreteProbs) { // Functional which loops over all assignments and create a set of // GaussianConditionals auto pruner = prunerFunc(discreteProbs); @@ -313,7 +313,7 @@ void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) { } /* *******************************************************************************/ -AlgebraicDecisionTree GaussianMixture::logProbability( +AlgebraicDecisionTree HybridGaussianConditional::logProbability( const VectorValues &continuousValues) const { // functor to calculate (double) logProbability value from // GaussianConditional. 
@@ -331,7 +331,7 @@ AlgebraicDecisionTree GaussianMixture::logProbability( } /* ************************************************************************* */ -double GaussianMixture::conditionalError( +double HybridGaussianConditional::conditionalError( const GaussianConditional::shared_ptr &conditional, const VectorValues &continuousValues) const { // Check if valid pointer @@ -348,7 +348,7 @@ double GaussianMixture::conditionalError( } /* *******************************************************************************/ -AlgebraicDecisionTree GaussianMixture::errorTree( +AlgebraicDecisionTree HybridGaussianConditional::errorTree( const VectorValues &continuousValues) const { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { return conditionalError(conditional, continuousValues); @@ -358,20 +358,20 @@ AlgebraicDecisionTree GaussianMixture::errorTree( } /* *******************************************************************************/ -double GaussianMixture::error(const HybridValues &values) const { +double HybridGaussianConditional::error(const HybridValues &values) const { // Directly index to get the conditional, no need to build the whole tree. auto conditional = conditionals_(values.discrete()); return conditionalError(conditional, values.continuous()); } /* *******************************************************************************/ -double GaussianMixture::logProbability(const HybridValues &values) const { +double HybridGaussianConditional::logProbability(const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->logProbability(values.continuous()); } /* *******************************************************************************/ -double GaussianMixture::evaluate(const HybridValues &values) const { +double HybridGaussianConditional::evaluate(const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->evaluate(values.continuous()); } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 10feaf55bf..87ddb2cb81 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianMixture.h + * @file HybridGaussianConditional.h * @brief A hybrid conditional in the Conditional Linear Gaussian scheme * @author Fan Jiang * @author Varun Agrawal @@ -50,14 +50,14 @@ class HybridValues; * * @ingroup hybrid */ -class GTSAM_EXPORT GaussianMixture +class GTSAM_EXPORT HybridGaussianConditional : public HybridFactor, - public Conditional { + public Conditional { public: - using This = GaussianMixture; - using shared_ptr = std::shared_ptr; + using This = HybridGaussianConditional; + using shared_ptr = std::shared_ptr; using BaseFactor = HybridFactor; - using BaseConditional = Conditional; + using BaseConditional = Conditional; /// typedef for Decision Tree of Gaussian Conditionals using Conditionals = DecisionTree; @@ -67,7 +67,7 @@ class GTSAM_EXPORT GaussianMixture double logConstant_; ///< log of the normalization constant. /** - * @brief Convert a GaussianMixture of conditionals into + * @brief Convert a HybridGaussianConditional of conditionals into * a DecisionTree of Gaussian factor graphs. */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; @@ -88,10 +88,10 @@ class GTSAM_EXPORT GaussianMixture /// @{ /// Default constructor, mainly for serialization. 
- GaussianMixture() = default; + HybridGaussianConditional() = default; /** - * @brief Construct a new GaussianMixture object. + * @brief Construct a new HybridGaussianConditional object. * * @param continuousFrontals the continuous frontals. * @param continuousParents the continuous parents. @@ -101,7 +101,7 @@ class GTSAM_EXPORT GaussianMixture * cardinality of the DiscreteKeys in discreteParents, since the * discreteParents will be used as the labels in the decision tree. */ - GaussianMixture(const KeyVector &continuousFrontals, + HybridGaussianConditional(const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const Conditionals &conditionals); @@ -114,7 +114,7 @@ class GTSAM_EXPORT GaussianMixture * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - GaussianMixture(KeyVector &&continuousFrontals, KeyVector &&continuousParents, + HybridGaussianConditional(KeyVector &&continuousFrontals, KeyVector &&continuousParents, DiscreteKeys &&discreteParents, std::vector &&conditionals); @@ -126,7 +126,7 @@ class GTSAM_EXPORT GaussianMixture * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - GaussianMixture( + HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals); @@ -140,7 +140,7 @@ class GTSAM_EXPORT GaussianMixture /// Print utility void print( - const std::string &s = "GaussianMixture\n", + const std::string &s = "HybridGaussianConditional\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} @@ -172,7 +172,7 @@ class GTSAM_EXPORT GaussianMixture const Conditionals &conditionals() const; /** - * @brief Compute logProbability of the GaussianMixture as a tree. + * @brief Compute logProbability of the HybridGaussianConditional as a tree. * * @param continuousValues The continuous VectorValues. * @return AlgebraicDecisionTree A decision tree with the same keys @@ -209,7 +209,7 @@ class GTSAM_EXPORT GaussianMixture double error(const HybridValues &values) const override; /** - * @brief Compute error of the GaussianMixture as a tree. + * @brief Compute error of the HybridGaussianConditional as a tree. * * @param continuousValues The continuous VectorValues. * @return AlgebraicDecisionTree A decision tree on the discrete keys @@ -277,6 +277,6 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits template <> -struct traits : public Testable {}; +struct traits : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1d01baed20..90aa6cde28 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -168,11 +168,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { DecisionTreeFactor prunedDiscreteProbs = this->pruneDiscreteConditionals(maxNrLeaves); - /* To prune, we visitWith every leaf in the GaussianMixture. + /* To prune, we visitWith every leaf in the HybridGaussianConditional. * For each leaf, using the assignment we can check the discrete decision tree * for 0.0 probability, then just set the leaf to a nullptr. * - * We can later check the GaussianMixture for just nullptrs. + * We can later check the HybridGaussianConditional for just nullptrs. 
*/ HybridBayesNet prunedBayesNetFragment; @@ -182,14 +182,14 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune it! - auto prunedGaussianMixture = std::make_shared(*gm); + auto prunedGaussianMixture = std::make_shared(*gm); prunedGaussianMixture->prune(prunedDiscreteProbs); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. prunedBayesNetFragment.push_back(prunedGaussianMixture); } else { - // Add the non-GaussianMixture conditional + // Add the non-HybridGaussianConditional conditional prunedBayesNetFragment.push_back(conditional); } } diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 8fae4061d9..5c453c1065 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -79,7 +79,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * * Example: * auto shared_ptr_to_a_conditional = - * std::make_shared(...); + * std::make_shared(...); * hbn.push_back(shared_ptr_to_a_conditional); */ void push_back(HybridConditional &&conditional) { @@ -106,7 +106,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { * Preferred: Emplace a conditional directly using arguments. * * Examples: - * hbn.emplace_shared(...))); + * hbn.emplace_shared(...))); * hbn.emplace_shared(...))); * hbn.emplace_shared(...))); */ diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 6c48934769..5ef74ea7de 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -55,7 +55,7 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( - const std::shared_ptr &gaussianMixture) + const std::shared_ptr &gaussianMixture) : BaseFactor(KeyVector(gaussianMixture->keys().begin(), gaussianMixture->keys().begin() + gaussianMixture->nrContinuous()), diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index fb6542822e..d02b69127b 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -18,7 +18,7 @@ #pragma once #include -#include +#include #include #include #include @@ -39,7 +39,7 @@ namespace gtsam { * As a type-erased variant of: * - DiscreteConditional * - GaussianConditional - * - GaussianMixture + * - HybridGaussianConditional * * The reason why this is important is that `Conditional` is a CRTP class. * CRTP is static polymorphism such that all CRTP classes, while bearing the @@ -127,7 +127,7 @@ class GTSAM_EXPORT HybridConditional * @param gaussianMixture Gaussian Mixture Conditional used to create the * HybridConditional. 
*/ - HybridConditional(const std::shared_ptr& gaussianMixture); + HybridConditional(const std::shared_ptr& gaussianMixture); /// @} /// @name Testable @@ -146,12 +146,12 @@ class GTSAM_EXPORT HybridConditional /// @{ /** - * @brief Return HybridConditional as a GaussianMixture + * @brief Return HybridConditional as a HybridGaussianConditional * @return nullptr if not a mixture - * @return GaussianMixture::shared_ptr otherwise + * @return HybridGaussianConditional::shared_ptr otherwise */ - GaussianMixture::shared_ptr asMixture() const { - return std::dynamic_pointer_cast(inner_); + HybridGaussianConditional::shared_ptr asMixture() const { + return std::dynamic_pointer_cast(inner_); } /** @@ -222,8 +222,8 @@ class GTSAM_EXPORT HybridConditional boost::serialization::void_cast_register( static_cast(NULL), static_cast(NULL)); } else { - boost::serialization::void_cast_register( - static_cast(NULL), static_cast(NULL)); + boost::serialization::void_cast_register( + static_cast(NULL), static_cast(NULL)); } } #endif diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 2a572ea655..c661165125 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -47,7 +47,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, * Examples: * - HybridNonlinearFactor * - HybridGaussianFactor - * - GaussianMixture + * - HybridGaussianConditional * * @ingroup hybrid */ diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 24669863e9..656060ae66 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -23,7 +23,7 @@ #include #include #include -#include +#include #include #include #include @@ -180,7 +180,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { result = addGaussian(result, gf); } else if (auto gmf = dynamic_pointer_cast(f)) { result = gmf->add(result); - } else if (auto gm = dynamic_pointer_cast(f)) { + } else if (auto gm = dynamic_pointer_cast(f)) { result = gm->add(result); } else if (auto hc = dynamic_pointer_cast(f)) { if (auto gm = hc->asMixture()) { @@ -408,10 +408,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, : createGaussianMixtureFactor(eliminationResults, continuousSeparator, discreteSeparator); - // Create the GaussianMixture from the conditionals - GaussianMixture::Conditionals conditionals( + // Create the HybridGaussianConditional from the conditionals + HybridGaussianConditional::Conditionals conditionals( eliminationResults, [](const Result &pair) { return pair.first; }); - auto gaussianMixture = std::make_shared( + auto gaussianMixture = std::make_shared( frontalKeys, continuousSeparator, discreteSeparator, conditionals); return {std::make_shared(gaussianMixture), newFactor}; @@ -458,7 +458,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // Because of all these reasons, we carefully consider how to // implement the hybrid factors so that we do not get poor performance. - // The first thing is how to represent the GaussianMixture. + // The first thing is how to represent the HybridGaussianConditional. // A very possible scenario is that the incoming factors will have different // levels of discrete keys. For example, imagine we are going to eliminate the // fragment: $\phi(x1,c1,c2)$, $\phi(x1,c2,c3)$, which is perfectly valid. 
@@ -599,7 +599,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()( gfg.push_back(gf); } else if (auto gmf = std::dynamic_pointer_cast(f)) { gfg.push_back((*gmf)(assignment)); - } else if (auto gm = dynamic_pointer_cast(f)) { + } else if (auto gm = dynamic_pointer_cast(f)) { gfg.push_back((*gm)(assignment)); } else { continue; diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 35a8ea3552..76703ad2df 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -18,7 +18,7 @@ #include #include -#include +#include #include #include #include @@ -80,7 +80,7 @@ void HybridNonlinearFactorGraph::printErrors( gmf->errorTree(values.continuous()).print("", keyFormatter); std::cout << std::endl; } - } else if (auto gm = std::dynamic_pointer_cast(factor)) { + } else if (auto gm = std::dynamic_pointer_cast(factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; @@ -163,7 +163,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( linearFG->push_back(f); } else if (auto gmf = dynamic_pointer_cast(f)) { linearFG->push_back(gmf); - } else if (auto gm = dynamic_pointer_cast(f)) { + } else if (auto gm = dynamic_pointer_cast(f)) { linearFG->push_back(gm); } else if (dynamic_pointer_cast(f)) { linearFG->push_back(f); diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index afa8340d2b..546837bf98 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -138,7 +138,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, } /* ************************************************************************* */ -GaussianMixture::shared_ptr HybridSmoother::gaussianMixture( +HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture( size_t index) const { return hybridBayesNet_.at(index)->asMixture(); } diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h index 5c2e4f546d..fc40e02974 100644 --- a/gtsam/hybrid/HybridSmoother.h +++ b/gtsam/hybrid/HybridSmoother.h @@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridSmoother { const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const; /// Get the Gaussian Mixture from the Bayes Net posterior at `index`. - GaussianMixture::shared_ptr gaussianMixture(size_t index) const; + HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const; /// Return the Bayes Net posterior. 
const HybridBayesNet& hybridBayesNet() const; diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index e5be00c6f7..834a067c31 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -65,7 +65,7 @@ virtual class HybridConditional { double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; double operator()(const gtsam::HybridValues& values) const; - gtsam::GaussianMixture* asMixture() const; + gtsam::HybridGaussianConditional* asMixture() const; gtsam::GaussianConditional* asGaussian() const; gtsam::DiscreteConditional* asDiscrete() const; gtsam::Factor* inner(); @@ -84,9 +84,9 @@ class HybridGaussianFactor : gtsam::HybridFactor { gtsam::DefaultKeyFormatter) const; }; -#include -class GaussianMixture : gtsam::HybridFactor { - GaussianMixture(const gtsam::KeyVector& continuousFrontals, +#include +class HybridGaussianConditional : gtsam::HybridFactor { + HybridGaussianConditional(const gtsam::KeyVector& continuousFrontals, const gtsam::KeyVector& continuousParents, const gtsam::DiscreteKeys& discreteParents, const std::vector& @@ -97,7 +97,7 @@ class GaussianMixture : gtsam::HybridFactor { double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; - void print(string s = "GaussianMixture\n", + void print(string s = "HybridGaussianConditional\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; @@ -131,7 +131,7 @@ class HybridBayesTree { #include class HybridBayesNet { HybridBayesNet(); - void push_back(const gtsam::GaussianMixture* s); + void push_back(const gtsam::HybridGaussianConditional* s); void push_back(const gtsam::GaussianConditional* s); void push_back(const gtsam::DiscreteConditional* s); diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 26b83db293..00af283085 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,7 +43,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? 
DiscreteKey{M(i), 2} : mode; - bayesNet.emplace_shared( + bayesNet.emplace_shared( KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index a379dd0362..116e1119d0 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -11,7 +11,7 @@ /** * @file testGaussianMixture.cpp - * @brief Unit tests for GaussianMixture class + * @brief Unit tests for HybridGaussianConditional class * @author Varun Agrawal * @author Fan Jiang * @author Frank Dellaert @@ -19,7 +19,7 @@ */ #include -#include +#include #include #include #include @@ -46,19 +46,19 @@ static const HybridValues hv1{vv, assignment1}; /* ************************************************************************* */ namespace equal_constants { -// Create a simple GaussianMixture +// Create a simple HybridGaussianConditional const double commonSigma = 2.0; const std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; -const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); } // namespace equal_constants /* ************************************************************************* */ /// Check that invariants hold -TEST(GaussianMixture, Invariants) { +TEST(HybridGaussianConditional, Invariants) { using namespace equal_constants; // Check that the mixture normalization constant is the max of all constants @@ -67,13 +67,13 @@ TEST(GaussianMixture, Invariants) { EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8); EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8); - EXPECT(GaussianMixture::CheckInvariants(mixture, hv0)); - EXPECT(GaussianMixture::CheckInvariants(mixture, hv1)); + EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0)); + EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1)); } /* ************************************************************************* */ /// Check LogProbability. -TEST(GaussianMixture, LogProbability) { +TEST(HybridGaussianConditional, LogProbability) { using namespace equal_constants; auto actual = mixture.logProbability(vv); @@ -95,7 +95,7 @@ TEST(GaussianMixture, LogProbability) { /* ************************************************************************* */ /// Check error. -TEST(GaussianMixture, Error) { +TEST(HybridGaussianConditional, Error) { using namespace equal_constants; auto actual = mixture.errorTree(vv); @@ -118,7 +118,7 @@ TEST(GaussianMixture, Error) { /* ************************************************************************* */ /// Check that the likelihood is proportional to the conditional density given /// the measurements. -TEST(GaussianMixture, Likelihood) { +TEST(HybridGaussianConditional, Likelihood) { using namespace equal_constants; // Compute likelihood @@ -147,19 +147,19 @@ TEST(GaussianMixture, Likelihood) { /* ************************************************************************* */ namespace mode_dependent_constants { -// Create a GaussianMixture with mode-dependent noise models. +// Create a HybridGaussianConditional with mode-dependent noise models. // 0 is low-noise, 1 is high-noise. 
const std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); } // namespace mode_dependent_constants /* ************************************************************************* */ // Create a test for continuousParents. -TEST(GaussianMixture, ContinuousParents) { +TEST(HybridGaussianConditional, ContinuousParents) { using namespace mode_dependent_constants; const KeyVector continuousParentKeys = mixture.continuousParents(); // Check that the continuous parent keys are correct: @@ -170,7 +170,7 @@ TEST(GaussianMixture, ContinuousParents) { /* ************************************************************************* */ /// Check that the likelihood is proportional to the conditional density given /// the measurements. -TEST(GaussianMixture, Likelihood2) { +TEST(HybridGaussianConditional, Likelihood2) { using namespace mode_dependent_constants; // Compute likelihood diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index cd20e72153..b90950605e 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include #include @@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{ } /* ************************************************************************* */ -TEST(HybridGaussianFactor, GaussianMixture) { +TEST(HybridGaussianFactor, HybridGaussianConditional) { KeyVector keys; keys.push_back(X(0)); keys.push_back(X(1)); @@ -154,8 +154,8 @@ TEST(HybridGaussianFactor, GaussianMixture) { dKeys.emplace_back(M(1), 2); auto gaussians = std::make_shared(); - GaussianMixture::Conditionals conditionals(gaussians); - GaussianMixture gm({}, keys, dKeys, conditionals); + HybridGaussianConditional::Conditionals conditionals(gaussians); + HybridGaussianConditional gm({}, keys, dKeys, conditionals); EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size()); } @@ -229,7 +229,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, c1 = make_shared(z, Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; - hbn.emplace_shared(KeyVector{z}, KeyVector{}, + hbn.emplace_shared(KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); auto mixing = make_shared(m, "0.5/0.5"); @@ -413,7 +413,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, c1 = make_shared(x1, Vector1(mu1), I_1x1, x0, -I_1x1, model1); - auto motion = std::make_shared( + auto motion = std::make_shared( KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1}); hbn.push_back(motion); diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 9b7ba97448..b35468302d 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -107,7 +107,7 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); - bayesNet.emplace_shared( + bayesNet.emplace_shared( KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, std::vector{conditional0, conditional1}); bayesNet.emplace_shared(Asia, "99/1"); @@ -168,7 +168,7 @@ TEST(HybridBayesNet, Error) { conditional1 = std::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); - auto gm = std::make_shared( + auto gm = std::make_shared( KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, std::vector{conditional0, conditional1}); // Create hybrid Bayes net. diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index 406306df7c..78dbd314cb 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -43,9 +43,9 @@ TEST(HybridConditional, Invariants) { auto hc0 = bn.at(0); CHECK(hc0->isHybrid()); - // Check invariants as a GaussianMixture. + // Check invariants as a HybridGaussianConditional. const auto mixture = hc0->asMixture(); - EXPECT(GaussianMixture::CheckInvariants(*mixture, values)); + EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values)); // Check invariants as a HybridConditional. EXPECT(HybridConditional::CheckInvariants(*hc0, values)); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 2956339163..299562e325 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -616,7 +616,7 @@ TEST(HybridEstimation, ModeSelection) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); - bn.emplace_shared( + bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, std::vector{GaussianConditional::sharedMeanAndStddev( Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose), @@ -647,7 +647,7 @@ TEST(HybridEstimation, ModeSelection2) { GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); - bn.emplace_shared( + bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, std::vector{GaussianConditional::sharedMeanAndStddev( Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose), diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 96d0c478ea..3386daac84 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -71,8 +71,8 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor // graph - GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), - GaussianMixture::Conditionals( + HybridGaussianConditional gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), + HybridGaussianConditional::Conditionals( M(0), std::make_shared( X(0), Z_3x1, I_3x3, X(1), I_3x3), @@ -681,7 +681,7 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); - hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, + hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1}); // Discrete uniform prior. 
@@ -805,7 +805,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); - expectedBayesNet.emplace_shared( + expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, std::vector{conditional0, conditional1}); @@ -830,7 +830,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { HybridBayesNet bn; // Create Gaussian mixture z_0 = x0 + noise for each measurement. - auto gm = std::make_shared( + auto gm = std::make_shared( KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, std::vector{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), @@ -862,7 +862,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { X(0), Vector1(10.1379), I_1x1 * 2.02759), conditional1 = std::make_shared( X(0), Vector1(14.1421), I_1x1 * 2.82843); - expectedBayesNet.emplace_shared( + expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, std::vector{conditional0, conditional1}); @@ -899,7 +899,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { X(0), Vector1(17.3205), I_1x1 * 3.4641), conditional1 = std::make_shared( X(0), Vector1(10.274), I_1x1 * 2.0548); - expectedBayesNet.emplace_shared( + expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, std::vector{conditional0, conditional1}); @@ -946,7 +946,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {0, 1, 2}) { // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - bn.emplace_shared( + bn.emplace_shared( KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), @@ -961,7 +961,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {2, 1}) { // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - auto gm = std::make_shared( + auto gm = std::make_shared( KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, std::vector{GaussianConditional::sharedMeanAndStddev( X(t), I_1x1, X(t - 1), Z_1x1, 0.2), diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index d2b0fded34..8b15b50d24 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -134,22 +134,22 @@ TEST(HybridGaussianElimination, IncrementalInference) { // The densities on X(0) should be the same auto x0_conditional = - dynamic_pointer_cast(isam[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( + dynamic_pointer_cast(isam[X(0)]->conditional()->inner()); + auto expected_x0_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same auto x1_conditional = - dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( + dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); + auto expected_x1_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same auto x2_conditional = - 
dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( + dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); + auto expected_x2_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); @@ -279,9 +279,9 @@ TEST(HybridGaussianElimination, Approx_inference) { // Check that the hybrid nodes of the bayes net match those of the pre-pruning // bayes net, at the same positions. - auto &unprunedLastDensity = *dynamic_pointer_cast( + auto &unprunedLastDensity = *dynamic_pointer_cast( unprunedHybridBayesTree->clique(X(3))->conditional()->inner()); - auto &lastDensity = *dynamic_pointer_cast( + auto &lastDensity = *dynamic_pointer_cast( incrementalHybrid[X(3)]->conditional()->inner()); std::vector> assignments = diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 9e2816865b..3f9a186538 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -350,7 +350,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { EliminateHybrid(factors, ordering); auto gaussianConditionalMixture = - dynamic_pointer_cast(hybridConditionalMixture->inner()); + dynamic_pointer_cast(hybridConditionalMixture->inner()); CHECK(gaussianConditionalMixture); // Frontals = [x0, x1] diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index e2ed4cbad1..06a4249d36 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -151,23 +151,23 @@ TEST(HybridNonlinearISAM, IncrementalInference) { .BaseEliminateable::eliminatePartialMultifrontal(ordering); // The densities on X(1) should be the same - auto x0_conditional = dynamic_pointer_cast( + auto x0_conditional = dynamic_pointer_cast( bayesTree[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( + auto expected_x0_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same - auto x1_conditional = dynamic_pointer_cast( + auto x1_conditional = dynamic_pointer_cast( bayesTree[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( + auto expected_x1_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same - auto x2_conditional = dynamic_pointer_cast( + auto x2_conditional = dynamic_pointer_cast( bayesTree[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( + auto expected_x2_conditional = dynamic_pointer_cast( (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); @@ -300,9 +300,9 @@ TEST(HybridNonlinearISAM, Approx_inference) { // Check that the hybrid nodes of the bayes net match those of the pre-pruning // bayes net, at the same positions. 
- auto &unprunedLastDensity = *dynamic_pointer_cast( + auto &unprunedLastDensity = *dynamic_pointer_cast( unprunedHybridBayesTree->clique(X(3))->conditional()->inner()); - auto &lastDensity = *dynamic_pointer_cast( + auto &lastDensity = *dynamic_pointer_cast( bayesTree[X(3)]->conditional()->inner()); std::vector> assignments = diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 31fe61d9b7..17196d2ae9 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -18,7 +18,7 @@ #include #include -#include +#include #include #include #include @@ -59,12 +59,12 @@ BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, "gtsam_GaussianMixtureFactor_Factors_Choice"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals, +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_GaussianMixture"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals, "gtsam_GaussianMixture_Conditionals"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Leaf, +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf, "gtsam_GaussianMixture_Conditionals_Leaf"); -BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice, +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice, "gtsam_GaussianMixture_Conditionals_Choice"); // Needed since GaussianConditional::FromMeanAndStddev uses it BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); @@ -106,20 +106,20 @@ TEST(HybridSerialization, HybridConditional) { } /* ****************************************************************************/ -// Test GaussianMixture serialization. -TEST(HybridSerialization, GaussianMixture) { +// Test HybridGaussianConditional serialization. +TEST(HybridSerialization, HybridGaussianConditional) { const DiscreteKey mode(M(0), 2); Matrix1 I = Matrix1::Identity(); const auto conditional0 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); - const GaussianMixture gm({Z(0)}, {X(0)}, {mode}, + const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); - EXPECT(equalsObj(gm)); - EXPECT(equalsXML(gm)); - EXPECT(equalsBinary(gm)); + EXPECT(equalsObj(gm)); + EXPECT(equalsXML(gm)); + EXPECT(equalsBinary(gm)); } /* ****************************************************************************/ diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index 0ab8b49a43..c76c05ab15 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -46,7 +46,7 @@ namespace gtsam { * Gaussian density over a set of continuous variables. * - \b Discrete conditionals, implemented in \class DiscreteConditional, which * represent a discrete conditional distribution over discrete variables. - * - \b Hybrid conditional densities, such as \class GaussianMixture, which is + * - \b Hybrid conditional densities, such as \class HybridGaussianConditional, which is * a density over continuous variables given discrete/continuous parents. * - \b Symbolic factors, used to represent a graph structure, implemented in * \class SymbolicConditional. Only used for symbolic elimination etc. 
diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index bc685dec68..0fd5699089 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -18,8 +18,9 @@ from gtsam.utils.test_case import GtsamTestCase from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues, - GaussianConditional, GaussianMixture, HybridBayesNet, - HybridValues, VectorValues, noiseModel) + GaussianConditional, HybridBayesNet, + HybridGaussianConditional, HybridValues, VectorValues, + noiseModel) class TestHybridBayesNet(GtsamTestCase): @@ -49,7 +50,7 @@ def test_evaluate(self): bayesNet = HybridBayesNet() bayesNet.push_back(conditional) bayesNet.push_back( - GaussianMixture([X(1)], [], discrete_keys, + HybridGaussianConditional([X(1)], [], discrete_keys, [conditional0, conditional1])) bayesNet.push_back(DiscreteConditional(Asia, "99/1")) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 02a9b7b7d3..c7c4c6b51f 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -18,9 +18,9 @@ import gtsam from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, - GaussianMixture, HybridBayesNet, HybridGaussianFactor, - HybridGaussianFactorGraph, HybridValues, JacobianFactor, - Ordering, noiseModel) + HybridBayesNet, HybridGaussianConditional, + HybridGaussianFactor, HybridGaussianFactorGraph, + HybridValues, JacobianFactor, Ordering, noiseModel) DEBUG_MARGINALS = False @@ -48,7 +48,7 @@ def test_create(self): self.assertEqual(hbn.size(), 2) mixture = hbn.at(0).inner() - self.assertIsInstance(mixture, GaussianMixture) + self.assertIsInstance(mixture, HybridGaussianConditional) self.assertEqual(len(mixture.keys()), 2) discrete_conditional = hbn.at(hbn.size() - 1).inner() @@ -106,7 +106,7 @@ def tiny(num_measurements: int = 1, I_1x1, X(0), [0], sigma=3) - bayesNet.push_back(GaussianMixture([Z(i)], [X(0)], keys, + bayesNet.push_back(HybridGaussianConditional([Z(i)], [X(0)], keys, [conditional0, conditional1])) # Create prior on X(0). 
From 94805798e90d63a4a747d20a42c3afd199c3c431 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:59:09 -0400 Subject: [PATCH 4/7] rename files --- ...{GaussianMixture.cpp => HybridGaussianConditional.cpp} | 8 ++++---- .../{GaussianMixture.h => HybridGaussianConditional.h} | 0 ...GaussianMixtureFactor.cpp => HybridGaussianFactor.cpp} | 0 .../{GaussianMixtureFactor.h => HybridGaussianFactor.h} | 0 gtsam/hybrid/{MixtureFactor.h => HybridNonlinearFactor.h} | 0 ...ssianMixture.cpp => testHybridGaussianConditional.cpp} | 0 ...sianMixtureFactor.cpp => testHybridGaussianFactor.cpp} | 2 +- ...estMixtureFactor.cpp => testHybridNonlinearFactor.cpp} | 2 +- 8 files changed, 6 insertions(+), 6 deletions(-) rename gtsam/hybrid/{GaussianMixture.cpp => HybridGaussianConditional.cpp} (98%) rename gtsam/hybrid/{GaussianMixture.h => HybridGaussianConditional.h} (100%) rename gtsam/hybrid/{GaussianMixtureFactor.cpp => HybridGaussianFactor.cpp} (100%) rename gtsam/hybrid/{GaussianMixtureFactor.h => HybridGaussianFactor.h} (100%) rename gtsam/hybrid/{MixtureFactor.h => HybridNonlinearFactor.h} (100%) rename gtsam/hybrid/tests/{testGaussianMixture.cpp => testHybridGaussianConditional.cpp} (100%) rename gtsam/hybrid/tests/{testGaussianMixtureFactor.cpp => testHybridGaussianFactor.cpp} (99%) rename gtsam/hybrid/tests/{testMixtureFactor.cpp => testHybridNonlinearFactor.cpp} (98%) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp similarity index 98% rename from gtsam/hybrid/GaussianMixture.cpp rename to gtsam/hybrid/HybridGaussianConditional.cpp index 451493ae0e..4ad85b8273 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -72,7 +72,7 @@ HybridGaussianConditional::HybridGaussianConditional( /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be derived from -// GaussianMixtureFactor, no? +// HybridGaussianFactor, no? 
GaussianFactorGraphTree HybridGaussianConditional::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -203,7 +203,7 @@ bool HybridGaussianConditional::allFrontalsGiven(const VectorValues &given) cons } /* ************************************************************************* */ -std::shared_ptr HybridGaussianConditional::likelihood( +std::shared_ptr HybridGaussianConditional::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( @@ -212,7 +212,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); - const GaussianMixtureFactor::Factors likelihoods( + const HybridGaussianFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = @@ -231,7 +231,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( return std::make_shared(gfg); } }); - return std::make_shared( + return std::make_shared( continuousParentKeys, discreteParentKeys, likelihoods); } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/HybridGaussianConditional.h similarity index 100% rename from gtsam/hybrid/GaussianMixture.h rename to gtsam/hybrid/HybridGaussianConditional.h diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp similarity index 100% rename from gtsam/hybrid/GaussianMixtureFactor.cpp rename to gtsam/hybrid/HybridGaussianFactor.cpp diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/HybridGaussianFactor.h similarity index 100% rename from gtsam/hybrid/GaussianMixtureFactor.h rename to gtsam/hybrid/HybridGaussianFactor.h diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h similarity index 100% rename from gtsam/hybrid/MixtureFactor.h rename to gtsam/hybrid/HybridNonlinearFactor.h diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp similarity index 100% rename from gtsam/hybrid/tests/testGaussianMixture.cpp rename to gtsam/hybrid/tests/testHybridGaussianConditional.cpp diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp similarity index 99% rename from gtsam/hybrid/tests/testGaussianMixtureFactor.cpp rename to gtsam/hybrid/tests/testHybridGaussianFactor.cpp index b90950605e..f9d2767606 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file testGaussianMixtureFactor.cpp + * @file testHybridGaussianFactor.cpp * @brief Unit tests for HybridGaussianFactor * @author Varun Agrawal * @author Fan Jiang diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp similarity index 98% rename from gtsam/hybrid/tests/testMixtureFactor.cpp rename to gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index dead8cd346..325478e949 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file testMixtureFactor.cpp + * @file testHybridNonlinearFactor.cpp * @brief Unit tests for HybridNonlinearFactor * @author Varun Agrawal * @date 
October 2022 From 035c92a38fc2d1113aa600106043663288b423a4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 05:59:56 -0400 Subject: [PATCH 5/7] finalize renaming --- doc/Hybrid.lyx | 2 +- gtsam/hybrid/HybridBayesNet.cpp | 6 +++--- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 ++-- gtsam/hybrid/HybridNonlinearFactor.h | 2 +- .../tests/testHybridGaussianConditional.cpp | 2 +- gtsam/hybrid/tests/testHybridNonlinearFactor.cpp | 6 +++--- gtsam/hybrid/tests/testSerializationHybrid.cpp | 16 ++++++++-------- 7 files changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/Hybrid.lyx b/doc/Hybrid.lyx index 17d7094e10..a5fdd5d0ae 100644 --- a/doc/Hybrid.lyx +++ b/doc/Hybrid.lyx @@ -201,7 +201,7 @@ HybridGaussianConditional \emph default (maybe to be renamed to \emph on -GaussianMixtureComponent +HybridGaussianConditionalComponent \emph default ) just indexes into a number of \emph on diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 90aa6cde28..4f8d2350cd 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -182,11 +182,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune it! - auto prunedGaussianMixture = std::make_shared(*gm); - prunedGaussianMixture->prune(prunedDiscreteProbs); // imperative :-( + auto prunedHybridGaussianConditional = std::make_shared(*gm); + prunedHybridGaussianConditional->prune(prunedDiscreteProbs); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. - prunedBayesNetFragment.push_back(prunedGaussianMixture); + prunedBayesNetFragment.push_back(prunedHybridGaussianConditional); } else { // Add the non-HybridGaussianConditional conditional diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 656060ae66..4c65467082 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -343,7 +343,7 @@ static std::shared_ptr createDiscreteFactor( // Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. -static std::shared_ptr createGaussianMixtureFactor( +static std::shared_ptr createHybridGaussianFactor( const DecisionTree &eliminationResults, const KeyVector &continuousSeparator, const DiscreteKeys &discreteSeparator) { @@ -405,7 +405,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, auto newFactor = continuousSeparator.empty() ? createDiscreteFactor(eliminationResults, discreteSeparator) - : createGaussianMixtureFactor(eliminationResults, continuousSeparator, + : createHybridGaussianFactor(eliminationResults, continuousSeparator, discreteSeparator); // Create the HybridGaussianConditional from the conditionals diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index d3d673dda3..7b2990cb07 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -188,7 +188,7 @@ class HybridNonlinearFactor : public HybridFactor { const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { std::cout << (s.empty() ? 
"" : s + " "); Base::print("", keyFormatter); - std::cout << "\nMixtureFactor\n"; + std::cout << "\nHybridNonlinearFactor\n"; auto valueFormatter = [](const sharedFactor& v) { if (v) { return "Nonlinear factor on " + std::to_string(v->size()) + " keys"; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 116e1119d0..3f18259de3 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file testGaussianMixture.cpp + * @file testHybridGaussianConditional.cpp * @brief Unit tests for HybridGaussianConditional class * @author Varun Agrawal * @author Fan Jiang diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index 325478e949..9e2794b19a 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -73,7 +73,7 @@ HybridNonlinearFactor } /* ************************************************************************* */ -static HybridNonlinearFactor getMixtureFactor() { +static HybridNonlinearFactor getHybridNonlinearFactor() { DiscreteKey m1(1, 2); double between0 = 0.0; @@ -94,7 +94,7 @@ static HybridNonlinearFactor getMixtureFactor() { /* ************************************************************************* */ // Test the error of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Error) { - auto mixtureFactor = getMixtureFactor(); + auto mixtureFactor = getHybridNonlinearFactor(); Values continuousValues; continuousValues.insert(X(1), 0); @@ -114,7 +114,7 @@ TEST(HybridNonlinearFactor, Error) { /* ************************************************************************* */ // Test dim of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Dim) { - auto mixtureFactor = getMixtureFactor(); + auto mixtureFactor = getHybridNonlinearFactor(); EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 17196d2ae9..76d9191127 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -51,21 +51,21 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") -BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_GaussianMixtureFactor"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors, - "gtsam_GaussianMixtureFactor_Factors"); + "gtsam_HybridGaussianFactor_Factors"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, - "gtsam_GaussianMixtureFactor_Factors_Leaf"); + "gtsam_HybridGaussianFactor_Factors_Leaf"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, - "gtsam_GaussianMixtureFactor_Factors_Choice"); + "gtsam_HybridGaussianFactor_Factors_Choice"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_GaussianMixture"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_HybridGaussianConditional"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals, - "gtsam_GaussianMixture_Conditionals"); + "gtsam_HybridGaussianConditional_Conditionals"); 
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf, - "gtsam_GaussianMixture_Conditionals_Leaf"); + "gtsam_HybridGaussianConditional_Conditionals_Leaf"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice, - "gtsam_GaussianMixture_Conditionals_Choice"); + "gtsam_HybridGaussianConditional_Conditionals_Choice"); // Needed since GaussianConditional::FromMeanAndStddev uses it BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); From 629989f9eeabb90425f4f95eac187e98a5affdfb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 06:20:46 -0400 Subject: [PATCH 6/7] formatting --- gtsam/hybrid/HybridBayesNet.cpp | 6 +- gtsam/hybrid/HybridConditional.cpp | 4 +- gtsam/hybrid/HybridConditional.h | 11 +-- gtsam/hybrid/HybridEliminationTree.h | 4 +- gtsam/hybrid/HybridGaussianConditional.cpp | 34 ++++++---- gtsam/hybrid/HybridGaussianConditional.h | 18 ++--- gtsam/hybrid/HybridGaussianFactor.cpp | 6 +- gtsam/hybrid/HybridGaussianFactor.h | 13 ++-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 8 +-- gtsam/hybrid/HybridGaussianFactorGraph.h | 3 +- gtsam/hybrid/HybridJunctionTree.h | 5 +- gtsam/hybrid/HybridNonlinearFactor.h | 17 ++--- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 5 +- gtsam/hybrid/HybridNonlinearISAM.h | 1 + gtsam/hybrid/hybrid.i | 51 +++++++------- gtsam/hybrid/tests/Switching.h | 6 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 +- .../hybrid/tests/testHybridGaussianFactor.cpp | 6 +- .../tests/testHybridGaussianFactorGraph.cpp | 23 ++++--- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 67 ++++++++++--------- .../tests/testHybridNonlinearFactor.cpp | 2 +- .../tests/testHybridNonlinearFactorGraph.cpp | 12 ++-- .../hybrid/tests/testHybridNonlinearISAM.cpp | 55 ++++++++------- .../hybrid/tests/testSerializationHybrid.cpp | 9 +-- python/gtsam/tests/test_HybridBayesNet.py | 2 +- python/gtsam/tests/test_HybridFactorGraph.py | 12 ++-- .../tests/test_HybridNonlinearFactorGraph.py | 6 +- python/gtsam/tests/test_HybridValues.py | 3 +- 28 files changed, 212 insertions(+), 179 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 4f8d2350cd..14a54c0b40 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -182,8 +182,10 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { for (auto &&conditional : *this) { if (auto gm = conditional->asMixture()) { // Make a copy of the Gaussian mixture and prune it! - auto prunedHybridGaussianConditional = std::make_shared(*gm); - prunedHybridGaussianConditional->prune(prunedDiscreteProbs); // imperative :-( + auto prunedHybridGaussianConditional = + std::make_shared(*gm); + prunedHybridGaussianConditional->prune( + prunedDiscreteProbs); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. prunedBayesNetFragment.push_back(prunedHybridGaussianConditional); diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 5ef74ea7de..8a8511aef9 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -157,10 +157,10 @@ double HybridConditional::logNormalizationConstant() const { return gc->logNormalizationConstant(); } if (auto gm = asMixture()) { - return gm->logNormalizationConstant(); // 0.0! + return gm->logNormalizationConstant(); // 0.0! } if (auto dc = asDiscrete()) { - return dc->logNormalizationConstant(); // 0.0! + return dc->logNormalizationConstant(); // 0.0! 
} throw std::runtime_error( "HybridConditional::logProbability: conditional type not handled"); diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index d02b69127b..fb51e95f67 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -18,8 +18,8 @@ #pragma once #include -#include #include +#include #include #include #include @@ -127,7 +127,8 @@ class GTSAM_EXPORT HybridConditional * @param gaussianMixture Gaussian Mixture Conditional used to create the * HybridConditional. */ - HybridConditional(const std::shared_ptr& gaussianMixture); + HybridConditional( + const std::shared_ptr& gaussianMixture); /// @} /// @name Testable @@ -222,8 +223,10 @@ class GTSAM_EXPORT HybridConditional boost::serialization::void_cast_register( static_cast(NULL), static_cast(NULL)); } else { - boost::serialization::void_cast_register( - static_cast(NULL), static_cast(NULL)); + boost::serialization::void_cast_register( + static_cast(NULL), + static_cast(NULL)); } } #endif diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index e1eb1764aa..543e09c6f0 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -35,8 +35,8 @@ class GTSAM_EXPORT HybridEliminationTree public: typedef EliminationTree - Base; ///< Base class - typedef HybridEliminationTree This; ///< This class + Base; ///< Base class + typedef HybridEliminationTree This; ///< This class typedef std::shared_ptr shared_ptr; ///< Shared pointer to this class /// @name Constructors diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4ad85b8273..42e7021556 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -50,7 +50,8 @@ HybridGaussianConditional::HybridGaussianConditional( } /* *******************************************************************************/ -const HybridGaussianConditional::Conditionals &HybridGaussianConditional::conditionals() const { +const HybridGaussianConditional::Conditionals & +HybridGaussianConditional::conditionals() const { return conditionals_; } @@ -59,20 +60,22 @@ HybridGaussianConditional::HybridGaussianConditional( KeyVector &&continuousFrontals, KeyVector &&continuousParents, DiscreteKeys &&discreteParents, std::vector &&conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, - Conditionals(discreteParents, conditionals)) {} + : HybridGaussianConditional(continuousFrontals, continuousParents, + discreteParents, + Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional( const KeyVector &continuousFrontals, const KeyVector &continuousParents, const DiscreteKeys &discreteParents, const std::vector &conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, discreteParents, - Conditionals(discreteParents, conditionals)) {} + : HybridGaussianConditional(continuousFrontals, continuousParents, + discreteParents, + Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ -// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be derived from -// HybridGaussianFactor, no? +// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be +// derived from HybridGaussianFactor, no? 
GaussianFactorGraphTree HybridGaussianConditional::add( const GaussianFactorGraphTree &sum) const { using Y = GaussianFactorGraph; @@ -86,7 +89,8 @@ GaussianFactorGraphTree HybridGaussianConditional::add( } /* *******************************************************************************/ -GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() const { +GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() + const { auto wrap = [this](const GaussianConditional::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { @@ -131,7 +135,8 @@ GaussianConditional::shared_ptr HybridGaussianConditional::operator()( } /* *******************************************************************************/ -bool HybridGaussianConditional::equals(const HybridFactor &lf, double tol) const { +bool HybridGaussianConditional::equals(const HybridFactor &lf, + double tol) const { const This *e = dynamic_cast(&lf); if (e == nullptr) return false; @@ -150,7 +155,7 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf, double tol) const /* *******************************************************************************/ void HybridGaussianConditional::print(const std::string &s, - const KeyFormatter &formatter) const { + const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); if (isContinuous()) std::cout << "Continuous "; if (isDiscrete()) std::cout << "Discrete "; @@ -193,7 +198,8 @@ KeyVector HybridGaussianConditional::continuousParents() const { } /* ************************************************************************* */ -bool HybridGaussianConditional::allFrontalsGiven(const VectorValues &given) const { +bool HybridGaussianConditional::allFrontalsGiven( + const VectorValues &given) const { for (auto &&kv : given) { if (given.find(kv.first) == given.end()) { return false; @@ -207,7 +213,8 @@ std::shared_ptr HybridGaussianConditional::likelihood( const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( - "HybridGaussianConditional::likelihood: given values are missing some frontals."); + "HybridGaussianConditional::likelihood: given values are missing some " + "frontals."); } const DiscreteKeys discreteParentKeys = discreteKeys(); @@ -365,7 +372,8 @@ double HybridGaussianConditional::error(const HybridValues &values) const { } /* *******************************************************************************/ -double HybridGaussianConditional::logProbability(const HybridValues &values) const { +double HybridGaussianConditional::logProbability( + const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->logProbability(values.continuous()); } diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 87ddb2cb81..fc315c6f9e 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -23,8 +23,8 @@ #include #include #include -#include #include +#include #include #include @@ -102,9 +102,9 @@ class GTSAM_EXPORT HybridGaussianConditional * discreteParents will be used as the labels in the decision tree. 
*/ HybridGaussianConditional(const KeyVector &continuousFrontals, - const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const Conditionals &conditionals); + const KeyVector &continuousParents, + const DiscreteKeys &discreteParents, + const Conditionals &conditionals); /** * @brief Make a Gaussian Mixture from a list of Gaussian conditionals @@ -114,9 +114,10 @@ class GTSAM_EXPORT HybridGaussianConditional * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - HybridGaussianConditional(KeyVector &&continuousFrontals, KeyVector &&continuousParents, - DiscreteKeys &&discreteParents, - std::vector &&conditionals); + HybridGaussianConditional( + KeyVector &&continuousFrontals, KeyVector &&continuousParents, + DiscreteKeys &&discreteParents, + std::vector &&conditionals); /** * @brief Make a Gaussian Mixture from a list of Gaussian conditionals @@ -277,6 +278,7 @@ std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits template <> -struct traits : public Testable {}; +struct traits + : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 4445d875f3..f7207bf81d 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -30,8 +30,8 @@ namespace gtsam { /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors) + const DiscreteKeys &discreteKeys, + const Factors &factors) : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ @@ -53,7 +53,7 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void HybridGaussianFactor::print(const std::string &s, - const KeyFormatter &formatter) const { + const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); std::cout << "HybridGaussianFactor" << std::endl; HybridFactor::print("", formatter); diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 9915e24e67..650f7a5a7c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -84,8 +84,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * as the mixture density. */ HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors); + const DiscreteKeys &discreteKeys, + const Factors &factors); /** * @brief Construct a new HybridGaussianFactor object using a vector of @@ -96,10 +96,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @param factors Vector of gaussian factor shared pointers. 
*/ HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const std::vector &factors) + const DiscreteKeys &discreteKeys, + const std::vector &factors) : HybridGaussianFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors)) {} /// @} /// @name Testable @@ -168,7 +168,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { // traits template <> -struct traits : public Testable { -}; +struct traits : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 4c65467082..900ec90c78 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -23,11 +23,11 @@ #include #include #include -#include -#include #include #include #include +#include +#include #include #include #include @@ -363,7 +363,7 @@ static std::shared_ptr createHybridGaussianFactor( correct); return std::make_shared(continuousSeparator, - discreteSeparator, newFactors); + discreteSeparator, newFactors); } static std::pair> @@ -406,7 +406,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, continuousSeparator.empty() ? createDiscreteFactor(eliminationResults, discreteSeparator) : createHybridGaussianFactor(eliminationResults, continuousSeparator, - discreteSeparator); + discreteSeparator); // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index e6ce4467cc..911058437d 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -18,9 +18,9 @@ #pragma once -#include #include #include +#include #include #include #include @@ -221,7 +221,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// Get the GaussianFactorGraph at a given discrete assignment. GaussianFactorGraph operator()(const DiscreteValues& assignment) const; - }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridJunctionTree.h b/gtsam/hybrid/HybridJunctionTree.h index a197e6e3c5..17b4649fc3 100644 --- a/gtsam/hybrid/HybridJunctionTree.h +++ b/gtsam/hybrid/HybridJunctionTree.h @@ -51,11 +51,10 @@ class HybridEliminationTree; */ class GTSAM_EXPORT HybridJunctionTree : public JunctionTree { - public: typedef JunctionTree - Base; ///< Base class - typedef HybridJunctionTree This; ///< This class + Base; ///< Base class + typedef HybridJunctionTree This; ///< This class typedef std::shared_ptr shared_ptr; ///< Shared pointer to this class /** diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 7b2990cb07..f5d9cd356a 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -75,7 +75,7 @@ class HybridNonlinearFactor : public HybridFactor { * normalized. 
*/ HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const Factors& factors, bool normalized = false) + const Factors& factors, bool normalized = false) : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} /** @@ -96,8 +96,8 @@ class HybridNonlinearFactor : public HybridFactor { */ template HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const std::vector>& factors, - bool normalized = false) + const std::vector>& factors, + bool normalized = false) : Base(keys, discreteKeys), normalized_(normalized) { std::vector nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); @@ -201,13 +201,14 @@ class HybridNonlinearFactor : public HybridFactor { /// Check equality bool equals(const HybridFactor& other, double tol = 1e-9) const override { - // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If it - // fails, return false. + // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If + // it fails, return false. if (!dynamic_cast(&other)) return false; - // If the cast is successful, we'll properly construct a HybridNonlinearFactor - // object from `other` - const HybridNonlinearFactor& f(static_cast(other)); + // If the cast is successful, we'll properly construct a + // HybridNonlinearFactor object from `other` + const HybridNonlinearFactor& f( + static_cast(other)); // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`. auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 76703ad2df..78370a157a 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -20,8 +20,8 @@ #include #include #include -#include #include +#include #include namespace gtsam { @@ -80,7 +80,8 @@ void HybridNonlinearFactorGraph::printErrors( gmf->errorTree(values.continuous()).print("", keyFormatter); std::cout << std::endl; } - } else if (auto gm = std::dynamic_pointer_cast(factor)) { + } else if (auto gm = std::dynamic_pointer_cast( + factor)) { if (factor == nullptr) { std::cout << "nullptr" << "\n"; diff --git a/gtsam/hybrid/HybridNonlinearISAM.h b/gtsam/hybrid/HybridNonlinearISAM.h index 3372593be9..e41b4ebe1b 100644 --- a/gtsam/hybrid/HybridNonlinearISAM.h +++ b/gtsam/hybrid/HybridNonlinearISAM.h @@ -19,6 +19,7 @@ #include #include + #include namespace gtsam { diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 834a067c31..686ce60ac2 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -10,26 +10,26 @@ class HybridValues { gtsam::DiscreteValues discrete() const; HybridValues(); - HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv); + HybridValues(const gtsam::VectorValues& cv, const gtsam::DiscreteValues& dv); void print(string s = "HybridValues", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; bool equals(const gtsam::HybridValues& other, double tol) const; - + void insert(gtsam::Key j, int value); void insert(gtsam::Key j, const gtsam::Vector& value); - + void insert_or_assign(gtsam::Key j, const gtsam::Vector& value); void insert_or_assign(gtsam::Key j, size_t value); void insert(const gtsam::VectorValues& values); void insert(const gtsam::DiscreteValues& values); void insert(const gtsam::HybridValues& values); - + void update(const gtsam::VectorValues& values); void update(const gtsam::DiscreteValues& 
values); void update(const gtsam::HybridValues& values); - + size_t& atDiscrete(gtsam::Key j); gtsam::Vector& at(gtsam::Key j); }; @@ -42,7 +42,7 @@ virtual class HybridFactor : gtsam::Factor { bool equals(const gtsam::HybridFactor& other, double tol = 1e-9) const; // Standard interface: - double error(const gtsam::HybridValues &values) const; + double error(const gtsam::HybridValues& values) const; bool isDiscrete() const; bool isContinuous() const; bool isHybrid() const; @@ -86,11 +86,12 @@ class HybridGaussianFactor : gtsam::HybridFactor { #include class HybridGaussianConditional : gtsam::HybridFactor { - HybridGaussianConditional(const gtsam::KeyVector& continuousFrontals, - const gtsam::KeyVector& continuousParents, - const gtsam::DiscreteKeys& discreteParents, - const std::vector& - conditionalsList); + HybridGaussianConditional( + const gtsam::KeyVector& continuousFrontals, + const gtsam::KeyVector& continuousParents, + const gtsam::DiscreteKeys& discreteParents, + const std::vector& + conditionalsList); gtsam::HybridGaussianFactor* likelihood( const gtsam::VectorValues& frontals) const; @@ -139,7 +140,7 @@ class HybridBayesNet { size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridConditional* at(size_t i) const; - + // Standard interface: double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; @@ -149,7 +150,7 @@ class HybridBayesNet { const gtsam::VectorValues& measurements) const; gtsam::HybridValues optimize() const; - gtsam::HybridValues sample(const gtsam::HybridValues &given) const; + gtsam::HybridValues sample(const gtsam::HybridValues& given) const; gtsam::HybridValues sample() const; void print(string s = "HybridBayesNet\n", @@ -189,7 +190,8 @@ class HybridGaussianFactorGraph { const gtsam::HybridFactor* at(size_t i) const; void print(string s = "") const; - bool equals(const gtsam::HybridGaussianFactorGraph& fg, double tol = 1e-9) const; + bool equals(const gtsam::HybridGaussianFactorGraph& fg, + double tol = 1e-9) const; // evaluation double error(const gtsam::HybridValues& values) const; @@ -222,7 +224,8 @@ class HybridNonlinearFactorGraph { void push_back(gtsam::HybridFactor* factor); void push_back(gtsam::NonlinearFactor* factor); void push_back(gtsam::DiscreteFactor* factor); - gtsam::HybridGaussianFactorGraph linearize(const gtsam::Values& continuousValues) const; + gtsam::HybridGaussianFactorGraph linearize( + const gtsam::Values& continuousValues) const; bool empty() const; void remove(size_t i); @@ -231,8 +234,8 @@ class HybridNonlinearFactorGraph { const gtsam::HybridFactor* at(size_t i) const; void print(string s = "HybridNonlinearFactorGraph\n", - const gtsam::KeyFormatter& keyFormatter = - gtsam::DefaultKeyFormatter) const; + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; }; #include @@ -243,18 +246,18 @@ class HybridNonlinearFactor : gtsam::HybridFactor { bool normalized = false); template - HybridNonlinearFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, - const std::vector& factors, - bool normalized = false); + HybridNonlinearFactor(const gtsam::KeyVector& keys, + const gtsam::DiscreteKeys& discreteKeys, + const std::vector& factors, + bool normalized = false); double error(const gtsam::Values& continuousValues, const gtsam::DiscreteValues& discreteValues) const; - double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor, - const gtsam::Values& values) const; + double 
nonlinearFactorLogNormalizingConstant( + const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const; - HybridGaussianFactor* linearize( - const gtsam::Values& continuousValues) const; + HybridGaussianFactor* linearize(const gtsam::Values& continuousValues) const; void print(string s = "HybridNonlinearFactor\n", const gtsam::KeyFormatter& keyFormatter = diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 0e8680aa91..f08d29fd5c 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -21,8 +21,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -60,9 +60,9 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( hfg.add(HybridGaussianFactor( {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}}, {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Z_3x1), + I_3x3, Z_3x1), std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Vector3::Ones())})); + I_3x3, Vector3::Ones())})); if (t > 1) { hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 299562e325..308daca9aa 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -19,10 +19,10 @@ #include #include #include +#include #include #include #include -#include #include #include #include diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index f9d2767606..847a38fa99 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -20,9 +20,9 @@ #include #include +#include #include #include -#include #include #include #include @@ -229,8 +229,8 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, c1 = make_shared(z, Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; - hbn.emplace_shared(KeyVector{z}, KeyVector{}, - DiscreteKeys{m}, std::vector{c0, c1}); + hbn.emplace_shared( + KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); auto mixing = make_shared(m, "0.5/0.5"); hbn.push_back(mixing); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 3386daac84..acd7b280f1 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -21,12 +21,12 @@ #include #include #include -#include -#include #include #include #include #include +#include +#include #include #include #include @@ -71,13 +71,14 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor // graph - HybridGaussianConditional gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), - HybridGaussianConditional::Conditionals( - M(0), - std::make_shared( - X(0), Z_3x1, I_3x3, X(1), I_3x3), - std::make_shared( - X(0), Vector3::Ones(), I_3x3, X(1), I_3x3))); + HybridGaussianConditional gm( + {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), + HybridGaussianConditional::Conditionals( + M(0), + std::make_shared(X(0), Z_3x1, I_3x3, X(1), + I_3x3), + std::make_shared(X(0), Vector3::Ones(), I_3x3, + X(1), I_3x3))); hfg.add(gm); EXPECT_LONGS_EQUAL(2, hfg.size()); @@ -681,8 +682,8 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); - 
hbn.emplace_shared(KeyVector{f01}, KeyVector{x0, x1}, - DiscreteKeys{m1}, std::vector{c0, c1}); + hbn.emplace_shared( + KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1}); // Discrete uniform prior. hbn.emplace_shared(m1, "0.5/0.5"); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 8b15b50d24..d0a4a79af5 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -126,31 +126,34 @@ TEST(HybridGaussianElimination, IncrementalInference) { /********************************************************/ // Run batch elimination so we can compare results. - const Ordering ordering {X(0), X(1), X(2)}; + const Ordering ordering{X(0), X(1), X(2)}; // Now we calculate the expected factors using full elimination const auto [expectedHybridBayesTree, expectedRemainingGraph] = switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); // The densities on X(0) should be the same - auto x0_conditional = - dynamic_pointer_cast(isam[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); + auto x0_conditional = dynamic_pointer_cast( + isam[X(0)]->conditional()->inner()); + auto expected_x0_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same - auto x1_conditional = - dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + auto x1_conditional = dynamic_pointer_cast( + isam[X(1)]->conditional()->inner()); + auto expected_x1_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same - auto x2_conditional = - dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + auto x2_conditional = dynamic_pointer_cast( + isam[X(2)]->conditional()->inner()); + auto expected_x2_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); // We only perform manual continuous elimination for 0,0. 
@@ -381,11 +384,11 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add connecting poses similar to PoseFactors in GTD fg.emplace_shared>(X(0), Y(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(0), Z(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(0), W(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); // Create initial estimate Values initial; @@ -414,9 +417,9 @@ TEST(HybridGaussianISAM, NonTrivial) { KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto still = std::make_shared(W(0), W(1), Pose2(0, 0, 0), - noise_model), + noise_model), moving = std::make_shared(W(0), W(1), odometry, - noise_model); + noise_model); std::vector components = {moving, still}; auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); @@ -424,14 +427,14 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(1), Y(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(1), Z(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(1), W(1), Pose2(-1, 1, 0), - poseNoise); + poseNoise); initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); @@ -454,7 +457,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add odometry factor with discrete modes. contKeys = {W(1), W(2)}; still = std::make_shared(W(1), W(2), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; @@ -464,14 +467,14 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(2), Y(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(2), Z(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(2), W(2), Pose2(-2, 1, 0), - poseNoise); + poseNoise); initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); @@ -497,7 +500,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add odometry factor with discrete modes. 
contKeys = {W(2), W(3)}; still = std::make_shared(W(2), W(3), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; @@ -507,14 +510,14 @@ TEST(HybridGaussianISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=3 fg.emplace_shared>(X(3), Y(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(3), Z(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(3), W(3), Pose2(-3, 1, 0), - poseNoise); + poseNoise); initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index 9e2794b19a..71d5108a05 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -20,8 +20,8 @@ #include #include #include -#include #include +#include #include #include diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 3f9a186538..fd3494bea3 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -23,8 +23,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -150,8 +150,8 @@ TEST(HybridGaussianFactorGraph, Resize) { } /*************************************************************************** - * Test that the HybridNonlinearFactor reports correctly if the number of continuous - * keys provided do not match the keys in the factors. + * Test that the HybridNonlinearFactor reports correctly if the number of + * continuous keys provided do not match the keys in the factors. */ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { auto nonlinearFactor = std::make_shared>( @@ -350,7 +350,8 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { EliminateHybrid(factors, ordering); auto gaussianConditionalMixture = - dynamic_pointer_cast(hybridConditionalMixture->inner()); + dynamic_pointer_cast( + hybridConditionalMixture->inner()); CHECK(gaussianConditionalMixture); // Frontals = [x0, x1] @@ -413,7 +414,8 @@ TEST(HybridFactorGraph, PrintErrors) { // fg.print(); // std::cout << "\n\n\n" << std::endl; // fg.printErrors( - // HybridValues(hv.continuous(), DiscreteValues(), self.linearizationPoint)); + // HybridValues(hv.continuous(), DiscreteValues(), + // self.linearizationPoint)); } /**************************************************************************** diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 06a4249d36..f4054c11ab 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -143,7 +143,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) { /********************************************************/ // Run batch elimination so we can compare results. 
- const Ordering ordering {X(0), X(1), X(2)}; + const Ordering ordering{X(0), X(1), X(2)}; // Now we calculate the actual factors using full elimination const auto [expectedHybridBayesTree, expectedRemainingGraph] = @@ -153,22 +153,25 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // The densities on X(1) should be the same auto x0_conditional = dynamic_pointer_cast( bayesTree[X(0)]->conditional()->inner()); - auto expected_x0_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); + auto expected_x0_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(0)]->conditional()->inner()); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); // The densities on X(1) should be the same auto x1_conditional = dynamic_pointer_cast( bayesTree[X(1)]->conditional()->inner()); - auto expected_x1_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + auto expected_x1_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); // The densities on X(2) should be the same auto x2_conditional = dynamic_pointer_cast( bayesTree[X(2)]->conditional()->inner()); - auto expected_x2_conditional = dynamic_pointer_cast( - (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + auto expected_x2_conditional = + dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); // We only perform manual continuous elimination for 0,0. @@ -410,11 +413,11 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add connecting poses similar to PoseFactors in GTD fg.emplace_shared>(X(0), Y(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(0), Z(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(0), W(0), Pose2(0, 1.0, 0), - poseNoise); + poseNoise); // Create initial estimate Values initial; @@ -433,9 +436,9 @@ TEST(HybridNonlinearISAM, NonTrivial) { KeyVector contKeys = {W(0), W(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto still = std::make_shared(W(0), W(1), Pose2(0, 0, 0), - noise_model), + noise_model), moving = std::make_shared(W(0), W(1), odometry, - noise_model); + noise_model); std::vector components = {moving, still}; auto mixtureFactor = std::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); @@ -443,14 +446,14 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(1), Y(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(1), Z(1), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(1), W(1), Pose2(-1, 1, 0), - poseNoise); + poseNoise); initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); @@ -473,7 +476,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add odometry factor with discrete modes. 
contKeys = {W(1), W(2)}; still = std::make_shared(W(1), W(2), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {moving, still}; @@ -483,14 +486,14 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=1 fg.emplace_shared>(X(2), Y(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(2), Z(2), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(2), W(2), Pose2(-2, 1, 0), - poseNoise); + poseNoise); initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); @@ -516,7 +519,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add odometry factor with discrete modes. contKeys = {W(2), W(3)}; still = std::make_shared(W(2), W(3), Pose2(0, 0, 0), - noise_model); + noise_model); moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {moving, still}; @@ -526,14 +529,14 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), - poseNoise); + poseNoise); // PoseFactors-like at k=3 fg.emplace_shared>(X(3), Y(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Y(3), Z(3), Pose2(0, 1, 0), - poseNoise); + poseNoise); fg.emplace_shared>(Z(3), W(3), Pose2(-3, 1, 0), - poseNoise); + poseNoise); initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 76d9191127..365dfcc9fc 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -18,11 +18,11 @@ #include #include -#include -#include #include #include #include +#include +#include #include #include @@ -59,7 +59,8 @@ BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, "gtsam_HybridGaussianFactor_Factors_Choice"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, "gtsam_HybridGaussianConditional"); +BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, + "gtsam_HybridGaussianConditional"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals, "gtsam_HybridGaussianConditional_Conditionals"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf, @@ -115,7 +116,7 @@ TEST(HybridSerialization, HybridGaussianConditional) { const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, - {conditional0, conditional1}); + {conditional0, conditional1}); EXPECT(equalsObj(gm)); EXPECT(equalsXML(gm)); diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index 0fd5699089..6e50c2bfe0 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -51,7 +51,7 @@ def test_evaluate(self): bayesNet.push_back(conditional) bayesNet.push_back( HybridGaussianConditional([X(1)], [], discrete_keys, - [conditional0, conditional1])) + [conditional0, conditional1])) bayesNet.push_back(DiscreteConditional(Asia, "99/1")) # Create values at which to evaluate. 
diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index c7c4c6b51f..f82665c495 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -27,6 +27,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" + def test_create(self): """Test construction of hybrid factor graph.""" model = noiseModel.Unit.Create(3) @@ -106,8 +107,9 @@ def tiny(num_measurements: int = 1, I_1x1, X(0), [0], sigma=3) - bayesNet.push_back(HybridGaussianConditional([Z(i)], [X(0)], keys, - [conditional0, conditional1])) + bayesNet.push_back( + HybridGaussianConditional([Z(i)], [X(0)], keys, + [conditional0, conditional1])) # Create prior on X(0). prior_on_x0 = GaussianConditional.FromMeanAndStddev( @@ -219,9 +221,9 @@ def unnormalized_posterior(x): # Check ratio between unnormalized posterior and factor graph is the same for all modes: for mode in [1, 0]: values.insert_or_assign(M(0), mode) - self.assertAlmostEqual(bayesNet.evaluate(values) / - np.exp(-fg.error(values)), - 0.6366197723675815) + self.assertAlmostEqual( + bayesNet.evaluate(values) / np.exp(-fg.error(values)), + 0.6366197723675815) self.assertAlmostEqual(bayesNet.error(values), fg.error(values)) # Test elimination. diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index 3c2a70f734..3a659c4e87 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -24,12 +24,14 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" + def test_nonlinear_hybrid(self): nlfg = gtsam.HybridNonlinearFactorGraph() dk = gtsam.DiscreteKeys() dk.push_back((10, 2)) - nlfg.push_back(BetweenFactorPoint3(1, 2, Point3( - 1, 2, 3), noiseModel.Diagonal.Variances([1, 1, 1]))) + nlfg.push_back( + BetweenFactorPoint3(1, 2, Point3(1, 2, 3), + noiseModel.Diagonal.Variances([1, 1, 1]))) nlfg.push_back( PriorFactorPoint3(2, Point3(1, 2, 3), noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) diff --git a/python/gtsam/tests/test_HybridValues.py b/python/gtsam/tests/test_HybridValues.py index 8a54d518ce..73c27a4cde 100644 --- a/python/gtsam/tests/test_HybridValues.py +++ b/python/gtsam/tests/test_HybridValues.py @@ -14,11 +14,12 @@ import unittest -import gtsam import numpy as np from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase +import gtsam + class TestHybridValues(GtsamTestCase): """Unit tests for HybridValues.""" From 8da15fd3e02ce41ba2f152d3522a0a64306533a0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 13 Sep 2024 13:40:22 -0400 Subject: [PATCH 7/7] update after merge --- gtsam/hybrid/tests/testHybridGaussianFactor.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index e1b4e46f4b..0b40ecc724 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -399,17 +399,15 @@ void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { } /// Create hybrid motion model p(x1 | x0, m1) -static GaussianMixture::shared_ptr CreateHybridMotionModel(double mu0, - double mu1, - double sigma0, - double sigma1) { +static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( + double mu0, double mu1, double 
sigma0, double sigma1) {
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
   auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
   auto c0 = make_shared<GaussianConditional>(X(1), Vector1(mu0), I_1x1, X(0),
                                              -I_1x1, model0),
        c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
                                              -I_1x1, model1);
-  return std::make_shared<GaussianMixture>(
+  return std::make_shared<HybridGaussianConditional>(
       KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
 }
@@ -621,7 +619,7 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
  * measurements and vastly different motion model: either stand still or move
  * far. This yields a very informative posterior.
  */
-TEST(GaussianMixtureFactor, TwoStateModel3) {
+TEST(HybridGaussianFactor, TwoStateModel3) {
   using namespace test_two_state_estimation;
   double mu0 = 0.0, mu1 = 10.0;