diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 14a54c0b40..6e96afb257 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -49,7 +49,7 @@ std::function &, double)> prunerFunc( const DecisionTreeFactor &prunedDiscreteProbs, const HybridConditional &conditional) { // Get the discrete keys as sets for the decision tree - // and the Gaussian mixture. + // and the hybrid Gaussian conditional. std::set discreteProbsKeySet = DiscreteKeysAsSet(prunedDiscreteProbs.discreteKeys()); std::set conditionalKeySet = @@ -63,7 +63,7 @@ std::function &, double)> prunerFunc( // typecast so we can use this to get probability value DiscreteValues values(choices); - // Case where the Gaussian mixture has the same + // Case where the hybrid Gaussian conditional has the same // discrete keys as the decision tree. if (conditionalKeySet == discreteProbsKeySet) { if (prunedDiscreteProbs(values) == 0) { @@ -180,8 +180,8 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { // Go through all the conditionals in the // Bayes Net and prune them as per prunedDiscreteProbs. for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { - // Make a copy of the Gaussian mixture and prune it! + if (auto gm = conditional->asHybrid()) { + // Make a copy of the hybrid Gaussian conditional and prune it! auto prunedHybridGaussianConditional = std::make_shared(*gm); prunedHybridGaussianConditional->prune( @@ -204,7 +204,7 @@ GaussianBayesNet HybridBayesNet::choose( const DiscreteValues &assignment) const { GaussianBayesNet gbn; for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, select based on assignment. gbn.push_back((*gm)(assignment)); } else if (auto gc = conditional->asGaussian()) { @@ -291,7 +291,7 @@ AlgebraicDecisionTree HybridBayesNet::errorTree( // Iterate over each conditional. 
for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, compute error for all assignments. result = result + gm->errorTree(continuousValues); @@ -321,7 +321,7 @@ AlgebraicDecisionTree HybridBayesNet::logProbability( // Iterate over each conditional. for (auto &&conditional : *this) { - if (auto gm = conditional->asMixture()) { + if (auto gm = conditional->asHybrid()) { // If conditional is hybrid, select based on assignment and compute // logProbability. result = result + gm->logProbability(continuousValues); @@ -369,7 +369,7 @@ HybridGaussianFactorGraph HybridBayesNet::toFactorGraph( if (conditional->frontalsIn(measurements)) { if (auto gc = conditional->asGaussian()) { fg.push_back(gc->likelihood(measurements)); - } else if (auto gm = conditional->asMixture()) { + } else if (auto gm = conditional->asHybrid()) { fg.push_back(gm->likelihood(measurements)); } else { throw std::runtime_error("Unknown conditional type"); diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 5c453c1065..d5d50afd29 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -28,7 +28,8 @@ namespace gtsam { /** * A hybrid Bayes net is a collection of HybridConditionals, which can have - * discrete conditionals, Gaussian mixtures, or pure Gaussian conditionals. + * discrete conditionals, hybrid Gaussian conditionals, + * or pure Gaussian conditionals. 
* * @ingroup hybrid */ diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index da2645b5a6..9aee6dcf81 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -109,7 +109,7 @@ struct HybridAssignmentData { GaussianConditional::shared_ptr conditional; if (hybrid_conditional->isHybrid()) { - conditional = (*hybrid_conditional->asMixture())(parentData.assignment_); + conditional = (*hybrid_conditional->asHybrid())(parentData.assignment_); } else if (hybrid_conditional->isContinuous()) { conditional = hybrid_conditional->asGaussian(); } else { @@ -205,9 +205,9 @@ void HybridBayesTree::prune(const size_t maxNrLeaves) { // If conditional is hybrid, we prune it. if (conditional->isHybrid()) { - auto gaussianMixture = conditional->asMixture(); + auto hybridGaussianCond = conditional->asHybrid(); - gaussianMixture->prune(parentData.prunedDiscreteProbs); + hybridGaussianCond->prune(parentData.prunedDiscreteProbs); } return parentData; } diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index ab2d8a73d2..0fe9ca6eac 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -99,8 +99,8 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { /** * @brief Recursively optimize the BayesTree to produce a vector solution. * - * @param assignment The discrete values assignment to select the Gaussian - * mixtures. + * @param assignment The discrete values assignment to select + * the hybrid conditional. 
* @return VectorValues */ VectorValues optimize(const DiscreteValues& assignment) const; @@ -170,7 +170,7 @@ class BayesTreeOrphanWrapper : public HybridConditional { void print( const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const override { - clique->print(s + "stored clique", formatter); + clique->print(s + " stored clique ", formatter); } }; diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index ed8125c2b7..074534b8d5 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -50,11 +50,11 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( - const std::shared_ptr &gaussianMixture) - : BaseFactor(gaussianMixture->continuousKeys(), - gaussianMixture->discreteKeys()), - BaseConditional(gaussianMixture->nrFrontals()) { - inner_ = gaussianMixture; + const std::shared_ptr &hybridGaussianCond) + : BaseFactor(hybridGaussianCond->continuousKeys(), + hybridGaussianCond->discreteKeys()), + BaseConditional(hybridGaussianCond->nrFrontals()) { + inner_ = hybridGaussianCond; } /* ************************************************************************ */ @@ -97,8 +97,8 @@ void HybridConditional::print(const std::string &s, bool HybridConditional::equals(const HybridFactor &other, double tol) const { const This *e = dynamic_cast(&other); if (e == nullptr) return false; - if (auto gm = asMixture()) { - auto other = e->asMixture(); + if (auto gm = asHybrid()) { + auto other = e->asHybrid(); return other != nullptr && gm->equals(*other, tol); } if (auto gc = asGaussian()) { @@ -119,7 +119,7 @@ double HybridConditional::error(const HybridValues &values) const { if (auto gc = asGaussian()) { return gc->error(values.continuous()); } - if (auto gm = asMixture()) { + if (auto gm = asHybrid()) { return gm->error(values); } if (auto dc = asDiscrete()) { @@ -134,7 +134,7 @@ double 
HybridConditional::logProbability(const HybridValues &values) const { if (auto gc = asGaussian()) { return gc->logProbability(values.continuous()); } - if (auto gm = asMixture()) { + if (auto gm = asHybrid()) { return gm->logProbability(values); } if (auto dc = asDiscrete()) { @@ -149,7 +149,7 @@ double HybridConditional::logNormalizationConstant() const { if (auto gc = asGaussian()) { return gc->logNormalizationConstant(); } - if (auto gm = asMixture()) { + if (auto gm = asHybrid()) { return gm->logNormalizationConstant(); // 0.0! } if (auto dc = asDiscrete()) { diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index fb51e95f67..f44ee2bf99 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -124,11 +124,11 @@ class GTSAM_EXPORT HybridConditional /** * @brief Construct a new Hybrid Conditional object * - * @param gaussianMixture Gaussian Mixture Conditional used to create the + * @param hybridGaussianCond Hybrid Gaussian Conditional used to create the * HybridConditional. 
*/ HybridConditional( - const std::shared_ptr& gaussianMixture); + const std::shared_ptr& hybridGaussianCond); /// @} /// @name Testable @@ -148,10 +148,10 @@ class GTSAM_EXPORT HybridConditional /** * @brief Return HybridConditional as a HybridGaussianConditional - * @return nullptr if not a mixture + * @return nullptr if not a conditional * @return HybridGaussianConditional::shared_ptr otherwise */ - HybridGaussianConditional::shared_ptr asMixture() const { + HybridGaussianConditional::shared_ptr asHybrid() const { return std::dynamic_pointer_cast(inner_); } diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4c2739c865..ad5f70eb03 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -249,20 +249,20 @@ std::function &, const GaussianConditional::shared_ptr &)> HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { // Get the discrete keys as sets for the decision tree - // and the gaussian mixture. + // and the hybrid gaussian conditional. auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys()); - auto gaussianMixtureKeySet = DiscreteKeysAsSet(this->discreteKeys()); + auto hybridGaussianCondKeySet = DiscreteKeysAsSet(this->discreteKeys()); - auto pruner = [discreteProbs, discreteProbsKeySet, gaussianMixtureKeySet]( + auto pruner = [discreteProbs, discreteProbsKeySet, hybridGaussianCondKeySet]( const Assignment &choices, const GaussianConditional::shared_ptr &conditional) -> GaussianConditional::shared_ptr { // typecast so we can use this to get probability value const DiscreteValues values(choices); - // Case where the gaussian mixture has the same + // Case where the hybrid gaussian conditional has the same // discrete keys as the decision tree. 
- if (gaussianMixtureKeySet == discreteProbsKeySet) { + if (hybridGaussianCondKeySet == discreteProbsKeySet) { if (discreteProbs(values) == 0.0) { // empty aka null pointer std::shared_ptr null; @@ -274,7 +274,7 @@ HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) { std::vector set_diff; std::set_difference( discreteProbsKeySet.begin(), discreteProbsKeySet.end(), - gaussianMixtureKeySet.begin(), gaussianMixtureKeySet.end(), + hybridGaussianCondKeySet.begin(), hybridGaussianCondKeySet.end(), std::back_inserter(set_diff)); const std::vector assignments = diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 240160744b..eb2bbb9378 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -33,8 +33,8 @@ namespace gtsam { class HybridValues; /** - * @brief A conditional of gaussian mixtures indexed by discrete variables, as - * part of a Bayes Network. This is the result of the elimination of a + * @brief A conditional of gaussian conditionals indexed by discrete variables, + * as part of a Bayes Network. This is the result of the elimination of a * continuous variable in a hybrid scheme, such that the remaining variables are * discrete+continuous. * @@ -107,7 +107,7 @@ class GTSAM_EXPORT HybridGaussianConditional const Conditionals &conditionals); /** - * @brief Make a Gaussian Mixture from a vector of Gaussian conditionals. + * @brief Make a Hybrid Gaussian Conditional from a vector of Gaussian conditionals. * The DecisionTree-based constructor is preferred over this one. * * @param continuousFrontals The continuous frontal variables @@ -152,8 +152,8 @@ class GTSAM_EXPORT HybridGaussianConditional double logNormalizationConstant() const override { return logConstant_; } /** - * Create a likelihood factor for a Gaussian mixture, return a Mixture factor - * on the parents. 
+ * Create a likelihood factor for a hybrid Gaussian conditional, + * return a hybrid Gaussian factor on the parents. */ std::shared_ptr likelihood( const VectorValues &given) const; @@ -172,9 +172,9 @@ class GTSAM_EXPORT HybridGaussianConditional const VectorValues &continuousValues) const; /** - * @brief Compute the error of this Gaussian Mixture. + * @brief Compute the error of this hybrid Gaussian conditional. * - * This requires some care, as different mixture components may have + * This requires some care, as different components may have * different normalization constants. Let's consider p(x|y,m), where m is * discrete. We need the error to satisfy the invariant: * @@ -209,7 +209,7 @@ class GTSAM_EXPORT HybridGaussianConditional const VectorValues &continuousValues) const; /** - * @brief Compute the logProbability of this Gaussian Mixture. + * @brief Compute the logProbability of this hybrid Gaussian conditional. * * @param values Continuous values and discrete assignment. * @return double diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 5f9cd925e3..8c73b7952c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -37,13 +37,13 @@ class VectorValues; using GaussianFactorValuePair = std::pair; /** - * @brief Implementation of a discrete conditional mixture factor. + * @brief Implementation of a discrete-conditioned hybrid factor. * Implements a joint discrete-continuous factor where the discrete variable - * serves to "select" a mixture component corresponding to a GaussianFactor type - * of measurement. + * serves to "select" a component corresponding to a GaussianFactor. * - * Represents the underlying Gaussian mixture as a Decision Tree, where the set - * of discrete variables indexes to the continuous gaussian distribution. 
+ * Represents the underlying hybrid Gaussian components as a Decision Tree, + * where the set of discrete variables indexes to + * the continuous gaussian distribution. * * @ingroup hybrid */ @@ -80,7 +80,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { HybridGaussianFactor() = default; /** - * @brief Construct a new Gaussian mixture factor. + * @brief Construct a new hybrid Gaussian factor. * * @param continuousKeys A vector of keys representing continuous variables. * @param discreteKeys A vector of keys representing discrete variables and diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 3621507451..28a0c446fd 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -115,7 +115,7 @@ void HybridGaussianFactorGraph::printErrors( } else { // Is hybrid auto conditionalComponent = - hc->asMixture()->operator()(values.discrete()); + hc->asHybrid()->operator()(values.discrete()); conditionalComponent->print(ss.str(), keyFormatter); std::cout << "error = " << conditionalComponent->error(values) << "\n"; @@ -184,7 +184,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { } else if (auto gm = dynamic_pointer_cast(f)) { result = gm->add(result); } else if (auto hc = dynamic_pointer_cast(f)) { - if (auto gm = hc->asMixture()) { + if (auto gm = hc->asHybrid()) { result = gm->add(result); } else if (auto g = hc->asGaussian()) { result = addGaussian(result, g); @@ -437,8 +437,8 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { // NOTE: Because we are in the Conditional Gaussian regime there are only // a few cases: - // 1. continuous variable, make a Gaussian Mixture if there are hybrid - // factors; + // 1. continuous variable, make a hybrid Gaussian conditional if there are + // hybrid factors; // 2. 
continuous variable, we make a Gaussian Factor if there are no hybrid // factors; // 3. discrete variable, no continuous factor is allowed @@ -550,9 +550,10 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( f = hc->inner(); } - if (auto gaussianMixture = dynamic_pointer_cast(f)) { + if (auto hybridGaussianCond = + dynamic_pointer_cast(f)) { // Compute factor error and add it. - error_tree = error_tree + gaussianMixture->errorTree(continuousValues); + error_tree = error_tree + hybridGaussianCond->errorTree(continuousValues); } else if (auto gaussian = dynamic_pointer_cast(f)) { // If continuous only, get the (double) error // and add it to the error_tree diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 911058437d..d7aa1042f5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -210,7 +210,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @brief Create a decision tree of factor graphs out of this hybrid factor * graph. * - * For example, if there are two mixture factors, one with a discrete key A + * For example, if there are two hybrid factors, one with a discrete key A * and one with a discrete key B, then the decision tree will have two levels, * one for A and one for B. The leaves of the tree will be the Gaussian * factors that have only continuous keys. diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index ae71300212..3c0e2ab91f 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -37,11 +37,10 @@ namespace gtsam { using NonlinearFactorValuePair = std::pair; /** - * @brief Implementation of a discrete conditional mixture factor. + * @brief Implementation of a discrete-conditioned hybrid factor. 
* * Implements a joint discrete-continuous factor where the discrete variable - * serves to "select" a mixture component corresponding to a NonlinearFactor - * type of measurement. + * serves to "select" a hybrid component corresponding to a NonlinearFactor. * * This class stores all factors as HybridFactors which can then be typecast to * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 78370a157a..6d19ef156d 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -151,7 +151,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( if (!f) { continue; } - // Check if it is a nonlinear mixture factor + // Check if it is a hybrid nonlinear factor if (auto mf = dynamic_pointer_cast(f)) { const HybridGaussianFactor::shared_ptr& gmf = mf->linearize(continuousValues); diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 546837bf98..b898c0520c 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -100,7 +100,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, // If hybridBayesNet is not empty, // it means we have conditionals to add to the factor graph. 
if (!hybridBayesNet.empty()) { - // We add all relevant conditional mixtures on the last continuous variable + // We add all relevant hybrid conditionals on the last continuous variable // in the previous `hybridBayesNet` to the graph // Conditionals to remove from the bayes net @@ -140,7 +140,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, /* ************************************************************************* */ HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture( size_t index) const { - return hybridBayesNet_.at(index)->asMixture(); + return hybridBayesNet_.at(index)->asHybrid(); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h index fc40e02974..267746ab62 100644 --- a/gtsam/hybrid/HybridSmoother.h +++ b/gtsam/hybrid/HybridSmoother.h @@ -34,7 +34,7 @@ class GTSAM_EXPORT HybridSmoother { * Given new factors, perform an incremental update. * The relevant densities in the `hybridBayesNet` will be added to the input * graph (fragment), and then eliminated according to the `ordering` - * presented. The remaining factor graph contains Gaussian mixture factors + * presented. The remaining factor graph contains hybrid Gaussian factors * that are not connected to the variables in the ordering, or a single * discrete factor on all discrete keys, plus all discrete factors in the * original graph. @@ -68,7 +68,13 @@ class GTSAM_EXPORT HybridSmoother { const HybridGaussianFactorGraph& graph, const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const; - /// Get the Gaussian Mixture from the Bayes Net posterior at `index`. + /** + * @brief Get the hybrid Gaussian conditional from + * the Bayes Net posterior at `index`. + * + * @param index Indexing value. 
+ * @return HybridGaussianConditional::shared_ptr + */ HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const; /// Return the Bayes Net posterior. diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index df4e6966da..ef179eb9d8 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -65,7 +65,7 @@ virtual class HybridConditional { double logProbability(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const; double operator()(const gtsam::HybridValues& values) const; - gtsam::HybridGaussianConditional* asMixture() const; + gtsam::HybridGaussianConditional* asHybrid() const; gtsam::GaussianConditional* asGaussian() const; gtsam::DiscreteConditional* asDiscrete() const; gtsam::Factor* inner(); diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index b3bb14c8f8..09905bf797 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -40,7 +40,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, bool manyModes = false) { HybridBayesNet bayesNet; - // Create Gaussian mixture z_i = x0 + noise for each measurement. + // Create hybrid Gaussian factor z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? 
DiscreteKey{M(i), 2} : mode; std::vector conditionals{ diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index bbe2d3df51..42ccf16009 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -144,13 +144,13 @@ TEST(HybridBayesNet, Choose) { EXPECT_LONGS_EQUAL(4, gbn.size()); - EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), *gbn.at(0))); - EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), *gbn.at(1))); - EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), *gbn.at(2))); - EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asMixture())(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), *gbn.at(3))); } @@ -280,9 +280,9 @@ TEST(HybridBayesNet, Pruning) { const DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}}; const HybridValues hybridValues{delta.continuous(), discrete_values}; double logProbability = 0; - logProbability += posterior->at(0)->asMixture()->logProbability(hybridValues); - logProbability += posterior->at(1)->asMixture()->logProbability(hybridValues); - logProbability += posterior->at(2)->asMixture()->logProbability(hybridValues); + logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues); + logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues); + logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues); // NOTE(dellaert): the discrete errors were not added in logProbability tree! 
logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues); diff --git a/gtsam/hybrid/tests/testHybridConditional.cpp b/gtsam/hybrid/tests/testHybridConditional.cpp index 78dbd314cb..365dde3bc1 100644 --- a/gtsam/hybrid/tests/testHybridConditional.cpp +++ b/gtsam/hybrid/tests/testHybridConditional.cpp @@ -44,8 +44,8 @@ TEST(HybridConditional, Invariants) { CHECK(hc0->isHybrid()); // Check invariants as a HybridGaussianConditional. - const auto mixture = hc0->asMixture(); - EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values)); + const auto conditional = hc0->asHybrid(); + EXPECT(HybridGaussianConditional::CheckInvariants(*conditional, values)); // Check invariants as a HybridConditional. EXPECT(HybridConditional::CheckInvariants(*hc0, values)); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index ee48ad5d8b..6f28f639da 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -429,7 +429,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { nfg.emplace_shared>(X(0), 0.0, noise_model); nfg.emplace_shared>(X(1), 1.0, noise_model); - // Add mixture factor: + // Add hybrid nonlinear factor: DiscreteKey m(M(0), 2); const auto zero_motion = std::make_shared>(X(0), X(1), 0, noise_model); diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index 8a2e3be3c9..20719b7395 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -50,7 +50,7 @@ TEST(HybridFactorGraph, Keys) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - // Add a gaussian mixture factor ϕ(x1, c1) + // Add a hybrid Gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); DecisionTree dt( M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp 
b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index d011584133..406203db84 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -52,9 +52,8 @@ const std::vector conditionals{ commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; -const HybridGaussianConditional mixture( - {Z(0)}, {X(0)}, {mode}, - HybridGaussianConditional::Conditionals({mode}, conditionals)); +const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, + conditionals); } // namespace equal_constants /* ************************************************************************* */ @@ -62,21 +61,21 @@ const HybridGaussianConditional mixture( TEST(HybridGaussianConditional, Invariants) { using namespace equal_constants; - // Check that the mixture normalization constant is the max of all constants - // which are all equal, in this case, hence: - const double K = mixture.logNormalizationConstant(); + // Check that the conditional normalization constant is the max of all + // constants which are all equal, in this case, hence: + const double K = hybrid_conditional.logNormalizationConstant(); EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8); EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8); - EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0)); - EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1)); + EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0)); + EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1)); } /* ************************************************************************* */ /// Check LogProbability. TEST(HybridGaussianConditional, LogProbability) { using namespace equal_constants; - auto actual = mixture.logProbability(vv); + auto actual = hybrid_conditional.logProbability(vv); // Check result. 
std::vector discrete_keys = {mode}; @@ -90,7 +89,7 @@ TEST(HybridGaussianConditional, LogProbability) { for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; EXPECT_DOUBLES_EQUAL(conditionals[mode]->logProbability(vv), - mixture.logProbability(hv), 1e-8); + hybrid_conditional.logProbability(hv), 1e-8); } } @@ -98,7 +97,7 @@ TEST(HybridGaussianConditional, LogProbability) { /// Check error. TEST(HybridGaussianConditional, Error) { using namespace equal_constants; - auto actual = mixture.errorTree(vv); + auto actual = hybrid_conditional.errorTree(vv); // Check result. std::vector discrete_keys = {mode}; @@ -111,8 +110,8 @@ TEST(HybridGaussianConditional, Error) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), mixture.error(hv), - 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), + hybrid_conditional.error(hv), 1e-8); } } @@ -123,11 +122,14 @@ TEST(HybridGaussianConditional, Likelihood) { using namespace equal_constants; // Compute likelihood - auto likelihood = mixture.likelihood(vv); + auto likelihood = hybrid_conditional.likelihood(vv); - // Check that the mixture error and the likelihood error are the same. - EXPECT_DOUBLES_EQUAL(mixture.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(mixture.error(hv1), likelihood->error(hv1), 1e-8); + // Check that the hybrid conditional error and the likelihood error are the + // same. + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check that likelihood error is as expected, i.e., just the errors of the // individual likelihoods, in the `equal_constants` case. 
@@ -141,7 +143,8 @@ TEST(HybridGaussianConditional, Likelihood) { std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / mixture.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -155,16 +158,15 @@ const std::vector conditionals{ 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const HybridGaussianConditional mixture( - {Z(0)}, {X(0)}, {mode}, - HybridGaussianConditional::Conditionals({mode}, conditionals)); +const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode, + conditionals); } // namespace mode_dependent_constants /* ************************************************************************* */ // Create a test for continuousParents. TEST(HybridGaussianConditional, ContinuousParents) { using namespace mode_dependent_constants; - const KeyVector continuousParentKeys = mixture.continuousParents(); + const KeyVector continuousParentKeys = hybrid_conditional.continuousParents(); // Check that the continuous parent keys are correct: EXPECT(continuousParentKeys.size() == 1); EXPECT(continuousParentKeys[0] == X(0)); @@ -177,12 +179,14 @@ TEST(HybridGaussianConditional, Likelihood2) { using namespace mode_dependent_constants; // Compute likelihood - auto likelihood = mixture.likelihood(vv); + auto likelihood = hybrid_conditional.likelihood(vv); - // Check that the mixture error and the likelihood error are as expected, - // this invariant is the same as the equal noise case: - EXPECT_DOUBLES_EQUAL(mixture.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(mixture.error(hv1), likelihood->error(hv1), 1e-8); + // Check that the hybrid conditional error and the likelihood error are as + // expected, this invariant is the same as the equal noise case: + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), 
likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check the detailed JacobianFactor calculation for mode==1. { @@ -195,7 +199,7 @@ TEST(HybridGaussianConditional, Likelihood2) { CHECK(jf1->rows() == 2); // Check that the constant C1 is properly encoded in the JacobianFactor. - const double C1 = mixture.logNormalizationConstant() - + const double C1 = hybrid_conditional.logNormalizationConstant() - conditionals[1]->logNormalizationConstant(); const double c1 = std::sqrt(2.0 * C1); Vector expected_unwhitened(2); @@ -209,15 +213,16 @@ TEST(HybridGaussianConditional, Likelihood2) { Vector actual_whitened = jf1->error_vector(vv); EXPECT(assert_equal(expected_whitened, actual_whitened)); - // Check that the error is equal to the mixture error: - EXPECT_DOUBLES_EQUAL(mixture.error(hv1), jf1->error(hv1), 1e-8); + // Check that the error is equal to the conditional error: + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), jf1->error(hv1), 1e-8); } // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / mixture.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 850a1e4d1c..9d7a09806b 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -45,7 +45,7 @@ using symbol_shorthand::X; using symbol_shorthand::Z; /* ************************************************************************* */ -// Check iterators of empty mixture. +// Check iterators of empty hybrid factor. 
TEST(HybridGaussianFactor, Constructor) { HybridGaussianFactor factor; HybridGaussianFactor::const_iterator const_it = factor.begin(); @@ -55,7 +55,7 @@ TEST(HybridGaussianFactor, Constructor) { } /* ************************************************************************* */ -// "Add" two mixture factors together. +// "Add" two hybrid factors together. TEST(HybridGaussianFactor, Sum) { DiscreteKey m1(1, 2), m2(2, 3); @@ -78,20 +78,20 @@ TEST(HybridGaussianFactor, Sum) { // TODO(Frank): why specify keys at all? And: keys in factor should be *all* // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // Design review! - HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); - HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); + HybridGaussianFactor hybridFactorA({X(1), X(2)}, {m1}, factorsA); + HybridGaussianFactor hybridFactorB({X(1), X(3)}, {m2}, factorsB); // Check that number of keys is 3 - EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); + EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size()); // Check that number of discrete keys is 1 - EXPECT_LONGS_EQUAL(1, mixtureFactorA.discreteKeys().size()); + EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size()); - // Create sum of two mixture factors: it will be a decision tree now on both + // Create sum of two hybrid factors: it will be a decision tree now on both // discrete variables m1 and m2: GaussianFactorGraphTree sum; - sum += mixtureFactorA; - sum += mixtureFactorB; + sum += hybridFactorA; + sum += hybridFactorB; // Let's check that this worked: Assignment mode; @@ -112,7 +112,7 @@ TEST(HybridGaussianFactor, Printing) { auto f11 = std::make_shared(X(1), A1, X(2), A2, b); std::vector factors{{f10, 0.0}, {f11, 0.0}}; - HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors); std::string expected = R"(HybridGaussianFactor @@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{ } )"; - 
EXPECT(assert_print_equal(expected, mixtureFactor)); + EXPECT(assert_print_equal(expected, hybridFactor)); } /* ************************************************************************* */ @@ -181,7 +181,7 @@ TEST(HybridGaussianFactor, Error) { auto f1 = std::make_shared(X(1), A11, X(2), A12, b); std::vector factors{{f0, 0.0}, {f1, 0.0}}; - HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors); VectorValues continuousValues; continuousValues.insert(X(1), Vector2(0, 0)); @@ -189,7 +189,7 @@ TEST(HybridGaussianFactor, Error) { // error should return a tree of errors, with nodes for each discrete value. AlgebraicDecisionTree error_tree = - mixtureFactor.errorTree(continuousValues); + hybridFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -202,7 +202,7 @@ TEST(HybridGaussianFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); + 4.0, hybridFactor.error({continuousValues, discreteValues}), 1e-9); } namespace test_gmm { diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index dc88c8b7ee..1530069aa1 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -69,8 +69,8 @@ TEST(HybridGaussianFactorGraph, Creation) { hfg.emplace_shared(X(0), I_3x3, Z_3x1); - // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor - // graph + // Define a hybrid gaussian conditional P(x0|x1, c0) + // and add it to the factor graph. 
HybridGaussianConditional gm( {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), HybridGaussianConditional::Conditionals( @@ -125,7 +125,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - // Add a gaussian mixture factor ϕ(x1, c1) + // Add a hybrid gaussian factor ϕ(x1, c1) DiscreteKey m1(M(1), 2); DecisionTree dt( M(1), {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, @@ -720,9 +720,9 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Create expected decision tree with two factor graphs: - // Get mixture factor: - auto mixture = fg.at(0); - CHECK(mixture); + // Get hybrid factor: + auto hybrid = fg.at(0); + CHECK(hybrid); // Get prior factor: const auto gf = fg.at(1); @@ -737,8 +737,8 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) GaussianFactorGraphTree expected{ - M(0), GaussianFactorGraph(std::vector{(*mixture)(d0), prior}), - GaussianFactorGraph(std::vector{(*mixture)(d1), prior})}; + M(0), GaussianFactorGraph(std::vector{(*hybrid)(d0), prior}), + GaussianFactorGraph(std::vector{(*hybrid)(d1), prior})}; EXPECT(assert_equal(expected(d0), actual(d0), 1e-5)); EXPECT(assert_equal(expected(d1), actual(d1), 1e-5)); @@ -802,7 +802,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { // Create expected Bayes Net: HybridBayesNet expectedBayesNet; - // Create Gaussian mixture on X(0). + // Create hybrid Gaussian conditional on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = std::make_shared( @@ -835,7 +835,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { const DiscreteKey mode{M(0), 2}; HybridBayesNet bn; - // Create Gaussian mixture z_0 = x0 + noise for each measurement. + // Create hybrid Gaussian conditional z_0 = x0 + noise for each measurement. 
std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; @@ -863,7 +863,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { // Create expected Bayes Net: HybridBayesNet expectedBayesNet; - // Create Gaussian mixture on X(0). + // Create hybrid Gaussian conditional on X(0). // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759), @@ -900,7 +900,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { // Create expected Bayes Net: HybridBayesNet expectedBayesNet; - // Create Gaussian mixture on X(0). + // Create hybrid Gaussian conditional on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = std::make_shared( @@ -953,7 +953,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // Add measurements: for (size_t t : {0, 1, 2}) { - // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): + // Create hybrid Gaussian conditional on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), @@ -970,7 +970,8 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // Add motion models: for (size_t t : {2, 1}) { - // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): + // Create hybrid Gaussian conditional on X(t) conditioned on X(t-1) + // and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; std::vector conditionals{ GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1, diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index e2a10a783d..72ab43a2ab 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -333,13 
+333,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) { // each with 2, 4, 8, and 5 (pruned) leaves respetively. EXPECT_LONGS_EQUAL(4, incrementalHybrid.size()); EXPECT_LONGS_EQUAL( - 2, incrementalHybrid[X(0)]->conditional()->asMixture()->nrComponents()); + 2, incrementalHybrid[X(0)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 3, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents()); + 3, incrementalHybrid[X(1)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(2)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(3)]->conditional()->asHybrid()->nrComponents()); /***** Run Round 2 *****/ HybridGaussianFactorGraph graph2; @@ -354,9 +354,9 @@ TEST(HybridGaussianElimination, Incremental_approximate) { // with 5 (pruned) leaves. 
CHECK_EQUAL(5, incrementalHybrid.size()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(3)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + 5, incrementalHybrid[X(4)]->conditional()->asHybrid()->nrComponents()); } /* ************************************************************************/ @@ -422,9 +422,8 @@ TEST(HybridGaussianISAM, NonTrivial) { noise_model); std::vector> components = { {moving, 0.0}, {still, 0.0}}; - auto mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), @@ -462,9 +461,8 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(2), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), @@ -505,9 +503,8 @@ TEST(HybridGaussianISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(3), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), @@ -551,7 +548,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // Test if pruning worked correctly by checking that we only have 3 leaves in // the last node. 
- auto lastConditional = inc[X(3)]->conditional()->asMixture(); + auto lastConditional = inc[X(3)]->conditional()->asHybrid(); EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp index a4c1b4cc7e..92efa680a3 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactor.cpp @@ -35,7 +35,7 @@ using symbol_shorthand::M; using symbol_shorthand::X; /* ************************************************************************* */ -// Check iterators of empty mixture. +// Check iterators of empty hybrid factor. TEST(HybridNonlinearFactor, Constructor) { HybridNonlinearFactor factor; HybridNonlinearFactor::const_iterator const_it = factor.begin(); @@ -60,7 +60,7 @@ TEST(HybridNonlinearFactor, Printing) { std::make_shared>(X(1), X(2), between1, model); std::vector factors{{f0, 0.0}, {f1, 0.0}}; - HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactor hybridFactor({X(1), X(2)}, {m1}, factors); std::string expected = R"(Hybrid [x1 x2; 1] @@ -69,7 +69,7 @@ HybridNonlinearFactor 0 Leaf Nonlinear factor on 2 keys 1 Leaf Nonlinear factor on 2 keys )"; - EXPECT(assert_print_equal(expected, mixtureFactor)); + EXPECT(assert_print_equal(expected, hybridFactor)); } /* ************************************************************************* */ @@ -94,14 +94,14 @@ static HybridNonlinearFactor getHybridNonlinearFactor() { /* ************************************************************************* */ // Test the error of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Error) { - auto mixtureFactor = getHybridNonlinearFactor(); + auto hybridFactor = getHybridNonlinearFactor(); Values continuousValues; continuousValues.insert(X(1), 0); continuousValues.insert(X(2), 1); AlgebraicDecisionTree error_tree = - mixtureFactor.errorTree(continuousValues); + 
hybridFactor.errorTree(continuousValues); DiscreteKey m1(1, 2); std::vector discrete_keys = {m1}; @@ -114,8 +114,8 @@ TEST(HybridNonlinearFactor, Error) { /* ************************************************************************* */ // Test dim of the HybridNonlinearFactor TEST(HybridNonlinearFactor, Dim) { - auto mixtureFactor = getHybridNonlinearFactor(); - EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); + auto hybridFactor = getHybridNonlinearFactor(); + EXPECT_LONGS_EQUAL(1, hybridFactor.dim()); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 3631ac44ed..36821a7820 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -281,7 +281,7 @@ TEST(HybridFactorGraph, EliminationTree) { TEST(GaussianElimination, Eliminate_x0) { Switching self(3); - // Gather factors on x1, has a simple Gaussian and a mixture factor. + // Gather factors on x1, has a simple Gaussian and a hybrid factor. HybridGaussianFactorGraph factors; // Add gaussian prior factors.push_back(self.linearizedFactorGraph[0]); @@ -306,7 +306,7 @@ TEST(GaussianElimination, Eliminate_x0) { TEST(HybridsGaussianElimination, Eliminate_x1) { Switching self(3); - // Gather factors on x1, will be two mixture factors (with x0 and x2, resp.). + // Gather factors on x1, will be two hybrid factors (with x0 and x2, resp.). 
HybridGaussianFactorGraph factors; factors.push_back(self.linearizedFactorGraph[1]); // involves m0 factors.push_back(self.linearizedFactorGraph[2]); // involves m1 @@ -349,18 +349,18 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { // Eliminate x0 const Ordering ordering{X(0), X(1)}; - const auto [hybridConditionalMixture, factorOnModes] = + const auto [hybridConditional, factorOnModes] = EliminateHybrid(factors, ordering); - auto gaussianConditionalMixture = + auto hybridGaussianConditional = dynamic_pointer_cast( - hybridConditionalMixture->inner()); + hybridConditional->inner()); - CHECK(gaussianConditionalMixture); + CHECK(hybridGaussianConditional); // Frontals = [x0, x1] - EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals()); + EXPECT_LONGS_EQUAL(2, hybridGaussianConditional->nrFrontals()); // 1 parent, which is the mode - EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents()); + EXPECT_LONGS_EQUAL(1, hybridGaussianConditional->nrParents()); // This is now a discreteFactor auto discreteFactor = dynamic_pointer_cast(factorOnModes); @@ -849,8 +849,8 @@ namespace test_relinearization { * This way we can directly provide the likelihoods and * then perform (re-)linearization. * - * @param means The means of the GaussianMixtureFactor components. - * @param sigmas The covariances of the GaussianMixtureFactor components. + * @param means The means of the HybridGaussianFactor components. + * @param sigmas The covariances of the HybridGaussianFactor components. * @param m1 The discrete key. 
* @param x0_measurement A measurement on X0 * @return HybridGaussianFactorGraph diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 6932508dfb..86e045bb03 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -358,13 +358,13 @@ TEST(HybridNonlinearISAM, Incremental_approximate) { // each with 2, 4, 8, and 5 (pruned) leaves respetively. EXPECT_LONGS_EQUAL(4, bayesTree.size()); EXPECT_LONGS_EQUAL( - 2, bayesTree[X(0)]->conditional()->asMixture()->nrComponents()); + 2, bayesTree[X(0)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 3, bayesTree[X(1)]->conditional()->asMixture()->nrComponents()); + 3, bayesTree[X(1)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(2)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(2)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(3)]->conditional()->asHybrid()->nrComponents()); /***** Run Round 2 *****/ HybridGaussianFactorGraph graph2; @@ -382,9 +382,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) { // with 5 (pruned) leaves. 
CHECK_EQUAL(5, bayesTree.size()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(3)]->conditional()->asHybrid()->nrComponents()); EXPECT_LONGS_EQUAL( - 5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents()); + 5, bayesTree[X(4)]->conditional()->asHybrid()->nrComponents()); } /* ************************************************************************/ @@ -441,9 +441,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { noise_model); std::vector> components = { {moving, 0.0}, {still, 0.0}}; - auto mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(1), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(0), X(1), Pose2(1.0, 0.0, 0), @@ -481,9 +480,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(2), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(1), X(2), Pose2(1.0, 0.0, 0), @@ -524,9 +522,8 @@ TEST(HybridNonlinearISAM, NonTrivial) { moving = std::make_shared(W(2), W(3), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; - mixtureFactor = std::make_shared( + fg.emplace_shared( contKeys, gtsam::DiscreteKey(M(3), 2), components); - fg.push_back(mixtureFactor); // Add equivalent of ImuFactor fg.emplace_shared>(X(2), X(3), Pose2(1.0, 0.0, 0), @@ -572,7 +569,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { // Test if pruning worked correctly by checking that // we only have 3 leaves in the last node. 
- auto lastConditional = bayesTree[X(3)]->conditional()->asMixture(); + auto lastConditional = bayesTree[X(3)]->conditional()->asHybrid(); EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 5eafc05aed..4ec1aec1ed 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -46,9 +46,9 @@ def test_create(self): self.assertEqual(hbn.size(), 2) - mixture = hbn.at(0).inner() - self.assertIsInstance(mixture, HybridGaussianConditional) - self.assertEqual(len(mixture.keys()), 2) + hybridCond = hbn.at(0).inner() + self.assertIsInstance(hybridCond, HybridGaussianConditional) + self.assertEqual(len(hybridCond.keys()), 2) discrete_conditional = hbn.at(hbn.size() - 1).inner() self.assertIsInstance(discrete_conditional, DiscreteConditional) @@ -90,7 +90,7 @@ def tiny(num_measurements: int = 1, # Create mode key: 0 is low-noise, 1 is high-noise. mode = (M(0), 2) - # Create Gaussian mixture Z(0) = X(0) + noise for each measurement. + # Create hybrid Gaussian conditional Z(0) = X(0) + noise for each measurement. I_1x1 = np.eye(1) for i in range(num_measurements): conditional0 = GaussianConditional.FromMeanAndStddev(Z(i),