From e114e9f6d2c893dcabaf2c7beea076fa0322b9c7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 26 Mar 2023 16:02:36 -0400 Subject: [PATCH 01/36] add nrAssignments method for DecisionTree --- gtsam/discrete/DecisionTree-inl.h | 13 ++++++++++++- gtsam/discrete/DecisionTree.h | 9 +++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 9d618dea02..2f55fb3fcb 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -649,8 +649,9 @@ namespace gtsam { throw std::invalid_argument("DecisionTree::create invalid argument"); } auto choice = std::make_shared(begin->first, endY - beginY); - for (ValueIt y = beginY; y != endY; y++) + for (ValueIt y = beginY; y != endY; y++) { choice->push_back(NodePtr(new Leaf(*y))); + } return Choice::Unique(choice); } @@ -827,6 +828,16 @@ namespace gtsam { return total; } + /****************************************************************************/ + template + size_t DecisionTree::nrAssignments() const { + size_t n = 0; + this->visitLeaf([&n](const DecisionTree::Leaf& leaf) { + n += leaf.nrAssignments(); + }); + return n; + } + /****************************************************************************/ // fold is just done with a visit template diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index ed19084859..70d528648d 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -299,6 +299,15 @@ namespace gtsam { /// Return the number of leaves in the tree. size_t nrLeaves() const; + /** + * @brief Return the number of total leaf assignments. + * This includes counts removed from implicit pruning hence, + * it will always be >= nrLeaves(). + * + * @return size_t + */ + size_t nrAssignments() const; + /** * @brief Fold a binary function over the tree, returning accumulator. 
* From 6aa7d667f39bb1b8fe2608f7354a98751d70da03 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 26 Mar 2023 16:04:16 -0400 Subject: [PATCH 02/36] add unit test showing issue with nrAssignments --- gtsam/discrete/tests/testDecisionTree.cpp | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index d2a94ddc33..c9973a3e91 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #include @@ -329,6 +330,9 @@ TEST(DecisionTree, Containers) { TEST(DecisionTree, NrAssignments) { const std::pair A("A", 2), B("B", 2), C("C", 2); DT tree({A, B, C}, "1 1 1 1 1 1 1 1"); + + EXPECT_LONGS_EQUAL(8, tree.nrAssignments()); + EXPECT(tree.root_->isLeaf()); auto leaf = std::dynamic_pointer_cast(tree.root_); EXPECT_LONGS_EQUAL(8, leaf->nrAssignments()); @@ -348,6 +352,8 @@ TEST(DecisionTree, NrAssignments) { 1 1 Leaf 5 */ + EXPECT_LONGS_EQUAL(8, tree2.nrAssignments()); + auto root = std::dynamic_pointer_cast(tree2.root_); CHECK(root); auto choice0 = std::dynamic_pointer_cast(root->branches()[0]); @@ -531,6 +537,23 @@ TEST(DecisionTree, ApplyWithAssignment) { EXPECT_LONGS_EQUAL(5, count); } +/* ************************************************************************** */ +// Test number of assignments. +TEST(DecisionTree, NrAssignments2) { + using gtsam::symbol_shorthand::M; + + DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; + std::vector probs = {0, 0, 1, 2}; + DecisionTree dt1(keys, probs); + + EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); + + DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; + DecisionTree dt2(keys2, probs); + //TODO(Varun) The below is failing, because the number of assignments aren't being set correctly. 
+ EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); +} + /* ************************************************************************* */ int main() { TestResult tr; From 181869566459c40beb244ec90342c3f11e9d8aeb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 29 Mar 2023 18:40:53 -0400 Subject: [PATCH 03/36] updated docs to better describe nrAssignments --- gtsam/discrete/DecisionTree.h | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 70d528648d..9cff7aa47c 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -303,6 +303,26 @@ namespace gtsam { * @brief Return the number of total leaf assignments. * This includes counts removed from implicit pruning hence, * it will always be >= nrLeaves(). + * + * E.g. we have a decision tree as below, where each node has 2 branches: + * + * Choice(m1) + * 0 Choice(m0) + * 0 0 Leaf 0.0 + * 0 1 Leaf 0.0 + * 1 Choice(m0) + * 1 0 Leaf 1.0 + * 1 1 Leaf 2.0 + * + * In the unpruned form, the tree will have 4 assignments, 2 for each key, and 4 leaves. 
+ * + * In the pruned form, the number of assignments is still 4 but the number of leaves is now 3, as below: + * + * Choice(m1) + * 0 Leaf 0.0 + * 1 Choice(m0) + * 1 0 Leaf 1.0 + * 1 1 Leaf 2.0 * * @return size_t */ From 73b563a9aa2db352d16a22ede655d02cae581b37 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 7 Jun 2023 21:13:03 -0400 Subject: [PATCH 04/36] WIP for debugging nrAssignments issue --- gtsam/discrete/DecisionTree-inl.h | 57 +++++++++++++++++------ gtsam/discrete/tests/testDecisionTree.cpp | 49 ++++++++++++------- 2 files changed, 73 insertions(+), 33 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 2f55fb3fcb..1b0472a807 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -93,7 +93,7 @@ namespace gtsam { /// print void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { - std::cout << s << " Leaf " << valueFormatter(constant_) << std::endl; + std::cout << s << " Leaf " << valueFormatter(constant_) << " | nrAssignments: " << nrAssignments_ << std::endl; } /** Write graphviz format to stream `os`. */ @@ -207,9 +207,9 @@ namespace gtsam { size_t nrAssignments = 0; for(auto branch: f->branches()) { - assert(branch->isLeaf()); - nrAssignments += - std::dynamic_pointer_cast(branch)->nrAssignments(); + if (auto leaf = std::dynamic_pointer_cast(branch)) { + nrAssignments += leaf->nrAssignments(); + } } NodePtr newLeaf( new Leaf(std::dynamic_pointer_cast(f0)->constant(), @@ -217,9 +217,35 @@ namespace gtsam { return newLeaf; } else #endif + // { + // Choice choice_node; + + // for (auto branch : f->branches()) { + // if (auto choice = std::dynamic_pointer_cast(branch)) { + // // `branch` is a Choice node so we apply Unique to it. 
+ // choice_node.push_back(Unique(choice)); + + // } else if (auto leaf = + // std::dynamic_pointer_cast(branch)) { + // choice_node.push_back(leaf); + // } + // } + // return std::make_shared(choice_node); + // } return f; } + static NodePtr UpdateNrAssignments(const NodePtr& f) { + if (auto choice = std::dynamic_pointer_cast(f)) { + // `f` is a Choice node so we recurse. + return UpdateNrAssignments(f); + + } else if (auto leaf = std::dynamic_pointer_cast(f)) { + + } + } + bool isLeaf() const override { return false; } /// Constructor, given choice label and mandatory expected branch count. @@ -282,7 +308,7 @@ namespace gtsam { void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { std::cout << s << " Choice("; - std::cout << labelFormatter(label_) << ") " << std::endl; + std::cout << labelFormatter(label_) << ") " << " | All Same: " << allSame_ << " | nrBranches: " << branches_.size() << std::endl; for (size_t i = 0; i < branches_.size(); i++) { branches_[i]->print(s + " " + std::to_string(i), labelFormatter, valueFormatter); } @@ -569,16 +595,16 @@ namespace gtsam { // find highest label among branches std::optional highestLabel; size_t nrChoices = 0; - for (Iterator it = begin; it != end; it++) { - if (it->root_->isLeaf()) - continue; - std::shared_ptr c = - std::dynamic_pointer_cast(it->root_); - if (!highestLabel || c->label() > *highestLabel) { - highestLabel = c->label(); - nrChoices = c->nrChoices(); - } - } + // for (Iterator it = begin; it != end; it++) { + // if (it->root_->isLeaf()) + // continue; + // std::shared_ptr c = + // std::dynamic_pointer_cast(it->root_); + // if (!highestLabel || c->label() > *highestLabel) { + // highestLabel = c->label(); + // nrChoices = c->nrChoices(); + // } + // } // if label is already in correct order, just put together a choice on label if (!nrChoices || !highestLabel || label > *highestLabel) { @@ -604,6 +630,7 @@ namespace gtsam { NodePtr fi = 
compose(functions.begin(), functions.end(), label); choiceOnHighestLabel->push_back(fi); } + // return Choice::ComputeNrAssignments(Choice::Unique(choiceOnHighestLabel)); return Choice::Unique(choiceOnHighestLabel); } } diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index c9973a3e91..beb100a61e 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -121,7 +121,7 @@ struct Ring { /* ************************************************************************** */ // test DT -TEST(DecisionTree, example) { +TEST_DISABLED(DecisionTree, example) { // Create labels string A("A"), B("B"), C("C"); @@ -231,7 +231,7 @@ TEST(DecisionTree, example) { bool bool_of_int(const int& y) { return y != 0; }; typedef DecisionTree StringBoolTree; -TEST(DecisionTree, ConvertValuesOnly) { +TEST_DISABLED(DecisionTree, ConvertValuesOnly) { // Create labels string A("A"), B("B"); @@ -252,7 +252,7 @@ TEST(DecisionTree, ConvertValuesOnly) { enum Label { U, V, X, Y, Z }; typedef DecisionTree LabelBoolTree; -TEST(DecisionTree, ConvertBoth) { +TEST_DISABLED(DecisionTree, ConvertBoth) { // Create labels string A("A"), B("B"); @@ -279,7 +279,7 @@ TEST(DecisionTree, ConvertBoth) { /* ************************************************************************** */ // test Compose expansion -TEST(DecisionTree, Compose) { +TEST_DISABLED(DecisionTree, Compose) { // Create labels string A("A"), B("B"), C("C"); @@ -305,7 +305,7 @@ TEST(DecisionTree, Compose) { /* ************************************************************************** */ // Check we can create a decision tree of containers. -TEST(DecisionTree, Containers) { +TEST_DISABLED(DecisionTree, Containers) { using Container = std::vector; using StringContainerTree = DecisionTree; @@ -327,7 +327,7 @@ TEST(DecisionTree, Containers) { /* ************************************************************************** */ // Test nrAssignments. 
-TEST(DecisionTree, NrAssignments) { +TEST_DISABLED(DecisionTree, NrAssignments) { const std::pair A("A", 2), B("B", 2), C("C", 2); DT tree({A, B, C}, "1 1 1 1 1 1 1 1"); @@ -375,7 +375,7 @@ TEST(DecisionTree, NrAssignments) { /* ************************************************************************** */ // Test visit. -TEST(DecisionTree, visit) { +TEST_DISABLED(DecisionTree, visit) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 0, 1), DT(A, 2, 3)); @@ -387,7 +387,7 @@ TEST(DecisionTree, visit) { /* ************************************************************************** */ // Test visit, with Choices argument. -TEST(DecisionTree, visitWith) { +TEST_DISABLED(DecisionTree, visitWith) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 0, 1), DT(A, 2, 3)); @@ -399,7 +399,7 @@ TEST(DecisionTree, visitWith) { /* ************************************************************************** */ // Test visit, with Choices argument. -TEST(DecisionTree, VisitWithPruned) { +TEST_DISABLED(DecisionTree, VisitWithPruned) { // Create pruned tree std::pair A("A", 2), B("B", 2), C("C", 2); std::vector> labels = {C, B, A}; @@ -437,7 +437,7 @@ TEST(DecisionTree, VisitWithPruned) { /* ************************************************************************** */ // Test fold. -TEST(DecisionTree, fold) { +TEST_DISABLED(DecisionTree, fold) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 1, 1), DT(A, 2, 3)); @@ -448,7 +448,7 @@ TEST(DecisionTree, fold) { /* ************************************************************************** */ // Test retrieving all labels. -TEST(DecisionTree, labels) { +TEST_DISABLED(DecisionTree, labels) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 0, 1), DT(A, 2, 3)); @@ -458,7 +458,7 @@ TEST(DecisionTree, labels) { /* ************************************************************************** */ // Test unzip method. 
-TEST(DecisionTree, unzip) { +TEST_DISABLED(DecisionTree, unzip) { using DTP = DecisionTree>; using DT1 = DecisionTree; using DT2 = DecisionTree; @@ -479,7 +479,7 @@ TEST(DecisionTree, unzip) { /* ************************************************************************** */ // Test thresholding. -TEST(DecisionTree, threshold) { +TEST_DISABLED(DecisionTree, threshold) { // Create three level tree const vector keys{DT::LabelC("C", 2), DT::LabelC("B", 2), DT::LabelC("A", 2)}; @@ -524,6 +524,8 @@ TEST(DecisionTree, ApplyWithAssignment) { DT prunedTree = tree.apply(pruner); DT expectedTree(keys, "0 0 0 0 5 6 7 8"); + // expectedTree.print(); + // prunedTree.print(); EXPECT(assert_equal(expectedTree, prunedTree)); size_t count = 0; @@ -542,16 +544,27 @@ TEST(DecisionTree, ApplyWithAssignment) { TEST(DecisionTree, NrAssignments2) { using gtsam::symbol_shorthand::M; - DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; std::vector probs = {0, 0, 1, 2}; + + DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; DecisionTree dt1(keys, probs); - EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); - + dt1.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); + + /* Create the DecisionTree + Choice(m1) + 0 Choice(m0) + 0 0 Leaf 0.000000 + 0 1 Leaf 1.000000 + 1 Choice(m0) + 1 0 Leaf 0.000000 + 1 1 Leaf 2.000000 + */ DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; DecisionTree dt2(keys2, probs); - //TODO(Varun) The below is failing, because the number of assignments aren't being set correctly. 
- EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); + std::cout << "\n\n" << std::endl; + dt2.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); + // EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); } /* ************************************************************************* */ From 8a8f146517c7b4660f31ddb6ea79a85f64baf1df Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 09:36:08 -0400 Subject: [PATCH 05/36] update Unique to be recursive --- gtsam/discrete/DecisionTree-inl.h | 75 +++++++++++++++---------------- 1 file changed, 36 insertions(+), 39 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 1b0472a807..bf2c4e1a9e 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -93,7 +93,7 @@ namespace gtsam { /// print void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { - std::cout << s << " Leaf " << valueFormatter(constant_) << " | nrAssignments: " << nrAssignments_ << std::endl; + std::cout << s << " Leaf " << valueFormatter(constant_) << std::endl; } /** Write graphviz format to stream `os`. */ @@ -201,6 +201,7 @@ namespace gtsam { /// If all branches of a choice node f are the same, just return a branch. static NodePtr Unique(const ChoicePtr& f) { #ifndef GTSAM_DT_NO_PRUNING + // If all the branches are the same, we can merge them into one if (f->allSame_) { assert(f->branches().size() > 0); NodePtr f0 = f->branches_[0]; @@ -215,34 +216,30 @@ namespace gtsam { new Leaf(std::dynamic_pointer_cast(f0)->constant(), nrAssignments)); return newLeaf; + } else + // Else we recurse #endif - // { - // Choice choice_node; - - // for (auto branch : f->branches()) { - // if (auto choice = std::dynamic_pointer_cast(branch)) { - // // `branch` is a Choice node so we apply Unique to it. 
- // choice_node.push_back(Unique(choice)); - - // } else if (auto leaf = - // std::dynamic_pointer_cast(branch)) { - // choice_node.push_back(leaf); - // } - // } - // return std::make_shared(choice_node); - // } - return f; - } - - static NodePtr UpdateNrAssignments(const NodePtr& f) { - if (auto choice = std::dynamic_pointer_cast(f)) { - // `f` is a Choice node so we recurse. - return UpdateNrAssignments(f); - - } else if (auto leaf = std::dynamic_pointer_cast(f)) { - + { + + // Make non-const copy + auto ff = std::make_shared(f->label(), f->nrChoices()); + + // Iterate over all the branches + for (size_t i = 0; i < f->nrChoices(); i++) { + auto branch = f->branches_[i]; + if (auto leaf = std::dynamic_pointer_cast(branch)) { + // Leaf node, simply assign + ff->push_back(branch); + + } else if (auto choice = + std::dynamic_pointer_cast(branch)) { + // Choice node, we recurse + ff->push_back(Unique(choice)); + } + } + + return ff; } } @@ -308,7 +305,7 @@ namespace gtsam { void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { std::cout << s << " Choice("; - std::cout << labelFormatter(label_) << ") " << " | All Same: " << allSame_ << " | nrBranches: " << branches_.size() << std::endl; + std::cout << labelFormatter(label_) << ") " << std::endl; for (size_t i = 0; i < branches_.size(); i++) { branches_[i]->print(s + " " + std::to_string(i), labelFormatter, valueFormatter); } @@ -595,16 +592,16 @@ namespace gtsam { // find highest label among branches std::optional highestLabel; size_t nrChoices = 0; - // for (Iterator it = begin; it != end; it++) { - // if (it->root_->isLeaf()) - // continue; - // std::shared_ptr c = - // std::dynamic_pointer_cast(it->root_); - // if (!highestLabel || c->label() > *highestLabel) { - // highestLabel = c->label(); - // nrChoices = c->nrChoices(); - // } - // } + for (Iterator it = begin; it != end; it++) { + if (it->root_->isLeaf()) + continue; + std::shared_ptr c 
= + std::dynamic_pointer_cast(it->root_); + if (!highestLabel || c->label() > *highestLabel) { + highestLabel = c->label(); + nrChoices = c->nrChoices(); + } + } // if label is already in correct order, just put together a choice on label if (!nrChoices || !highestLabel || label > *highestLabel) { @@ -679,7 +676,7 @@ namespace gtsam { for (ValueIt y = beginY; y != endY; y++) { choice->push_back(NodePtr(new Leaf(*y))); } - return Choice::Unique(choice); + return choice; } // Recursive case: perform "Shannon expansion" From ff1ea32fabbd8c5c33af61f4e163c31a81e90208 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 09:37:11 -0400 Subject: [PATCH 06/36] remove unnecessary code --- gtsam/discrete/DecisionTree-inl.h | 1 - 1 file changed, 1 deletion(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index bf2c4e1a9e..be083236ed 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -627,7 +627,6 @@ namespace gtsam { NodePtr fi = compose(functions.begin(), functions.end(), label); choiceOnHighestLabel->push_back(fi); } - // return Choice::ComputeNrAssignments(Choice::Unique(choiceOnHighestLabel)); return Choice::Unique(choiceOnHighestLabel); } } From dbd0a7d3ba166cc4545f7168aa929e42392b14ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 09:53:22 -0400 Subject: [PATCH 07/36] re-enable DecisionTree tests --- gtsam/discrete/tests/testDecisionTree.cpp | 56 ++++++++++++++++------- 1 file changed, 39 insertions(+), 17 deletions(-) diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index beb100a61e..fb49908e2f 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -121,7 +121,7 @@ struct Ring { /* ************************************************************************** */ // test DT -TEST_DISABLED(DecisionTree, example) { +TEST(DecisionTree, example) { // Create labels string 
A("A"), B("B"), C("C"); @@ -231,7 +231,7 @@ TEST_DISABLED(DecisionTree, example) { bool bool_of_int(const int& y) { return y != 0; }; typedef DecisionTree StringBoolTree; -TEST_DISABLED(DecisionTree, ConvertValuesOnly) { +TEST(DecisionTree, ConvertValuesOnly) { // Create labels string A("A"), B("B"); @@ -252,7 +252,7 @@ TEST_DISABLED(DecisionTree, ConvertValuesOnly) { enum Label { U, V, X, Y, Z }; typedef DecisionTree LabelBoolTree; -TEST_DISABLED(DecisionTree, ConvertBoth) { +TEST(DecisionTree, ConvertBoth) { // Create labels string A("A"), B("B"); @@ -279,7 +279,7 @@ TEST_DISABLED(DecisionTree, ConvertBoth) { /* ************************************************************************** */ // test Compose expansion -TEST_DISABLED(DecisionTree, Compose) { +TEST(DecisionTree, Compose) { // Create labels string A("A"), B("B"), C("C"); @@ -305,7 +305,7 @@ TEST_DISABLED(DecisionTree, Compose) { /* ************************************************************************** */ // Check we can create a decision tree of containers. -TEST_DISABLED(DecisionTree, Containers) { +TEST(DecisionTree, Containers) { using Container = std::vector; using StringContainerTree = DecisionTree; @@ -327,7 +327,7 @@ TEST_DISABLED(DecisionTree, Containers) { /* ************************************************************************** */ // Test nrAssignments. -TEST_DISABLED(DecisionTree, NrAssignments) { +TEST(DecisionTree, NrAssignments) { const std::pair A("A", 2), B("B", 2), C("C", 2); DT tree({A, B, C}, "1 1 1 1 1 1 1 1"); @@ -375,7 +375,7 @@ TEST_DISABLED(DecisionTree, NrAssignments) { /* ************************************************************************** */ // Test visit. 
-TEST_DISABLED(DecisionTree, visit) { +TEST(DecisionTree, visit) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 0, 1), DT(A, 2, 3)); @@ -387,7 +387,7 @@ TEST_DISABLED(DecisionTree, visit) { /* ************************************************************************** */ // Test visit, with Choices argument. -TEST_DISABLED(DecisionTree, visitWith) { +TEST(DecisionTree, visitWith) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 0, 1), DT(A, 2, 3)); @@ -399,7 +399,7 @@ TEST_DISABLED(DecisionTree, visitWith) { /* ************************************************************************** */ // Test visit, with Choices argument. -TEST_DISABLED(DecisionTree, VisitWithPruned) { +TEST(DecisionTree, VisitWithPruned) { // Create pruned tree std::pair A("A", 2), B("B", 2), C("C", 2); std::vector> labels = {C, B, A}; @@ -437,7 +437,7 @@ TEST_DISABLED(DecisionTree, VisitWithPruned) { /* ************************************************************************** */ // Test fold. -TEST_DISABLED(DecisionTree, fold) { +TEST(DecisionTree, fold) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 1, 1), DT(A, 2, 3)); @@ -448,7 +448,7 @@ TEST_DISABLED(DecisionTree, fold) { /* ************************************************************************** */ // Test retrieving all labels. -TEST_DISABLED(DecisionTree, labels) { +TEST(DecisionTree, labels) { // Create small two-level tree string A("A"), B("B"); DT tree(B, DT(A, 0, 1), DT(A, 2, 3)); @@ -458,7 +458,7 @@ TEST_DISABLED(DecisionTree, labels) { /* ************************************************************************** */ // Test unzip method. -TEST_DISABLED(DecisionTree, unzip) { +TEST(DecisionTree, unzip) { using DTP = DecisionTree>; using DT1 = DecisionTree; using DT2 = DecisionTree; @@ -479,7 +479,7 @@ TEST_DISABLED(DecisionTree, unzip) { /* ************************************************************************** */ // Test thresholding. 
-TEST_DISABLED(DecisionTree, threshold) { +TEST(DecisionTree, threshold) { // Create three level tree const vector keys{DT::LabelC("C", 2), DT::LabelC("B", 2), DT::LabelC("A", 2)}; @@ -541,7 +541,7 @@ TEST(DecisionTree, ApplyWithAssignment) { /* ************************************************************************** */ // Test number of assignments. -TEST(DecisionTree, NrAssignments2) { +TEST(DecisionTree, Constructor) { using gtsam::symbol_shorthand::M; std::vector probs = {0, 0, 1, 2}; @@ -551,6 +551,30 @@ TEST(DecisionTree, NrAssignments2) { EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); dt1.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); + DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; + DecisionTree dt2(keys2, probs); + std::cout << "\n" << std::endl; + dt2.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); +} + +/* ************************************************************************** */ +// Test number of assignments. +TEST(DecisionTree, NrAssignments2) { + using gtsam::symbol_shorthand::M; + + std::vector probs = {0, 0, 1, 2}; + + /* Create the decision tree + Choice(m1) + 0 Leaf 0.000000 + 1 Choice(m0) + 1 0 Leaf 1.000000 + 1 1 Leaf 2.000000 + */ + DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; + DecisionTree dt1(keys, probs); + EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); + /* Create the DecisionTree Choice(m1) 0 Choice(m0) @@ -562,9 +586,7 @@ TEST(DecisionTree, NrAssignments2) { */ DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; DecisionTree dt2(keys2, probs); - std::cout << "\n\n" << std::endl; - dt2.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); - // EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); + EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); } /* ************************************************************************* */ From 68cb7249707e4b6f9522d975ef054b9cfa6d005c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 09:53:39 -0400 Subject: [PATCH 08/36] add new build method 
to replace create, and let create call Unique --- gtsam/discrete/DecisionTree-inl.h | 31 +++++++++++++++++++++++-------- gtsam/discrete/DecisionTree.h | 12 +++++++++--- 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index be083236ed..58de338a84 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -632,16 +632,16 @@ namespace gtsam { } /****************************************************************************/ - // "create" is a bit of a complicated thing, but very useful. + // "build" is a bit of a complicated thing, but very useful. // It takes a range of labels and a corresponding range of values, - // and creates a decision tree, as follows: + // and builds a decision tree, as follows: // - if there is only one label, creates a choice node with values in leaves // - otherwise, it evenly splits up the range of values and creates a tree for // each sub-range, and assigns that tree to first label's choices // Example: - // create([B A],[1 2 3 4]) would call - // create([A],[1 2]) - // create([A],[3 4]) + // build([B A],[1 2 3 4]) would call + // build([A],[1 2]) + // build([A],[3 4]) // and produce // B=0 // A=0: 1 @@ -649,12 +649,12 @@ namespace gtsam { // B=1 // A=0: 3 // A=1: 4 - // Note, through the magic of "compose", create([A B],[1 2 3 4]) will produce + // Note, through the magic of "compose", build([A B],[1 2 3 4]) will produce // exactly the same tree as above: the highest label is always the root. // However, it will be *way* faster if labels are given highest to lowest. 
template template - typename DecisionTree::NodePtr DecisionTree::create( + typename DecisionTree::NodePtr DecisionTree::build( It begin, It end, ValueIt beginY, ValueIt endY) const { // get crucial counts size_t nrChoices = begin->second; @@ -684,12 +684,27 @@ namespace gtsam { std::vector functions; size_t split = size / nrChoices; for (size_t i = 0; i < nrChoices; i++, beginY += split) { - NodePtr f = create(labelC, end, beginY, beginY + split); + NodePtr f = build(labelC, end, beginY, beginY + split); functions.emplace_back(f); } return compose(functions.begin(), functions.end(), begin->first); } + /****************************************************************************/ + // Take a range of labels and a corresponding range of values, + // and creates a decision tree. + template + template + typename DecisionTree::NodePtr DecisionTree::create( + It begin, It end, ValueIt beginY, ValueIt endY) const { + auto node = build(begin, end, beginY, endY); + if (auto choice = std::dynamic_pointer_cast(node)) { + return Choice::Unique(choice); + } else { + return node; + } + } + /****************************************************************************/ template template diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 9cff7aa47c..a2b05070bc 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -136,10 +136,16 @@ namespace gtsam { NodePtr root_; protected: - /** Internal recursive function to create from keys, cardinalities, - * and Y values + /** Internal recursive function to create from keys, cardinalities, + * and Y values */ - template + template + NodePtr build(It begin, It end, ValueIt beginY, ValueIt endY) const; + + /** Internal helper function to create from keys, cardinalities, + * and Y values + */ + template NodePtr create(It begin, It end, ValueIt beginY, ValueIt endY) const; /** From be70ffcf01f103235f1cdb06352a48f004603b43 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 
10:36:21 -0400 Subject: [PATCH 09/36] remove excessive Unique call to improve efficiency --- gtsam/discrete/DecisionTree-inl.h | 6 +++--- gtsam/discrete/DecisionTree.h | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 58de338a84..16a926271c 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -627,7 +627,7 @@ namespace gtsam { NodePtr fi = compose(functions.begin(), functions.end(), label); choiceOnHighestLabel->push_back(fi); } - return Choice::Unique(choiceOnHighestLabel); + return choiceOnHighestLabel; } } @@ -691,8 +691,8 @@ namespace gtsam { } /****************************************************************************/ - // Take a range of labels and a corresponding range of values, - // and creates a decision tree. + // Top-level factory method, which takes a range of labels and a corresponding + // range of values, and creates a decision tree. template template typename DecisionTree::NodePtr DecisionTree::create( diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index a2b05070bc..a5d890ab55 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -142,8 +142,10 @@ namespace gtsam { template NodePtr build(It begin, It end, ValueIt beginY, ValueIt endY) const; - /** Internal helper function to create from keys, cardinalities, - * and Y values + /** Internal helper function to create from + * keys, cardinalities, and Y values. + * Calls `build` which builds thetree bottom-up, + * before we prune in a top-down fashion. 
*/ template NodePtr create(It begin, It end, ValueIt beginY, ValueIt endY) const; From c3090f00df89765731dbd0dc996bc5e71ccdfea6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 11:04:15 -0400 Subject: [PATCH 10/36] cleanup --- gtsam/discrete/tests/testDecisionTree.cpp | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index fb49908e2f..71daf261d1 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -524,8 +524,6 @@ TEST(DecisionTree, ApplyWithAssignment) { DT prunedTree = tree.apply(pruner); DT expectedTree(keys, "0 0 0 0 5 6 7 8"); - // expectedTree.print(); - // prunedTree.print(); EXPECT(assert_equal(expectedTree, prunedTree)); size_t count = 0; @@ -539,24 +537,6 @@ TEST(DecisionTree, ApplyWithAssignment) { EXPECT_LONGS_EQUAL(5, count); } -/* ************************************************************************** */ -// Test number of assignments. -TEST(DecisionTree, Constructor) { - using gtsam::symbol_shorthand::M; - - std::vector probs = {0, 0, 1, 2}; - - DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; - DecisionTree dt1(keys, probs); - EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); - dt1.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); - - DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; - DecisionTree dt2(keys2, probs); - std::cout << "\n" << std::endl; - dt2.print("", DefaultKeyFormatter, [](double x) { return std::to_string(x);}); -} - /* ************************************************************************** */ // Test number of assignments. 
TEST(DecisionTree, NrAssignments2) { From 70ffbf32bcb535f3275eba2c65318b2a62dda62f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 13:16:49 -0400 Subject: [PATCH 11/36] mark nrAssignments as const --- gtsam/discrete/DecisionTreeFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index 5fb5ae2e61..4aca10a284 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -307,7 +307,7 @@ namespace gtsam { // Get the probabilities in the decision tree so we can threshold. std::vector probabilities; this->visitLeaf([&](const Leaf& leaf) { - size_t nrAssignments = leaf.nrAssignments(); + const size_t nrAssignments = leaf.nrAssignments(); double prob = leaf.constant(); probabilities.insert(probabilities.end(), nrAssignments, prob); }); From 23520432ec68946495325a459509689e7f42a911 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 13:18:06 -0400 Subject: [PATCH 12/36] rename GTSAM_DT_NO_PRUNING to GTSAM_DT_NO_MERGING to help with disambiguation --- gtsam/discrete/DecisionTree-inl.h | 2 +- gtsam/discrete/tests/testAlgebraicDecisionTree.cpp | 2 +- gtsam/discrete/tests/testDecisionTree.cpp | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 16a926271c..7a227d8dd3 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -200,7 +200,7 @@ namespace gtsam { /// If all branches of a choice node f are the same, just return a branch. 
static NodePtr Unique(const ChoicePtr& f) { -#ifndef GTSAM_DT_NO_PRUNING +#ifndef GTSAM_DT_NO_MERGING // If all the branches are the same, we can merge them into one if (f->allSame_) { assert(f->branches().size() > 0); diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp index c7cb7088ec..d7e7a071c1 100644 --- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp +++ b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp @@ -20,7 +20,7 @@ #include // make sure we have traits #include // headers first to make sure no missing headers -//#define GTSAM_DT_NO_PRUNING +//#define GTSAM_DT_NO_MERGING #include #include // for convert only #define DISABLE_TIMING diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 71daf261d1..0559a782b7 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -18,7 +18,7 @@ */ // #define DT_DEBUG_MEMORY -// #define GTSAM_DT_NO_PRUNING +// #define GTSAM_DT_NO_MERGING #define DISABLE_DOT #include #include From 2998820d2cedbfb867d1a889ed4b2c3a3cdf79e6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 17:58:12 -0400 Subject: [PATCH 13/36] bottom-up Unique method that works much, much better --- gtsam/discrete/DecisionTree-inl.h | 74 +++++++++---------- .../tests/testDiscreteFactorGraph.cpp | 72 +++++++++++++++++- 2 files changed, 102 insertions(+), 44 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 7a227d8dd3..3e85ba70a3 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -199,47 +199,41 @@ namespace gtsam { } /// If all branches of a choice node f are the same, just return a branch. 
- static NodePtr Unique(const ChoicePtr& f) { -#ifndef GTSAM_DT_NO_MERGING - // If all the branches are the same, we can merge them into one - if (f->allSame_) { - assert(f->branches().size() > 0); - NodePtr f0 = f->branches_[0]; - - size_t nrAssignments = 0; - for(auto branch: f->branches()) { - if (auto leaf = std::dynamic_pointer_cast(branch)) { - nrAssignments += leaf->nrAssignments(); - } - } - NodePtr newLeaf( - new Leaf(std::dynamic_pointer_cast(f0)->constant(), - nrAssignments)); - return newLeaf; - - } else - // Else we recurse -#endif - { - + static NodePtr Unique(const NodePtr& node) { + if (auto choice = std::dynamic_pointer_cast(node)) { + // Choice node, we recurse! // Make non-const copy - auto ff = std::make_shared(f->label(), f->nrChoices()); + auto f = std::make_shared(choice->label(), choice->nrChoices()); // Iterate over all the branches - for (size_t i = 0; i < f->nrChoices(); i++) { - auto branch = f->branches_[i]; - if (auto leaf = std::dynamic_pointer_cast(branch)) { - // Leaf node, simply assign - ff->push_back(branch); - - } else if (auto choice = - std::dynamic_pointer_cast(branch)) { - // Choice node, we recurse - ff->push_back(Unique(choice)); - } + for (size_t i = 0; i < choice->nrChoices(); i++) { + auto branch = choice->branches_[i]; + f->push_back(Unique(branch)); } - return ff; +#ifndef GTSAM_DT_NO_MERGING + // If all the branches are the same, we can merge them into one + if (f->allSame_) { + assert(f->branches().size() > 0); + NodePtr f0 = f->branches_[0]; + + // Compute total number of assignments + size_t nrAssignments = 0; + for (auto branch : f->branches()) { + if (auto leaf = std::dynamic_pointer_cast(branch)) { + nrAssignments += leaf->nrAssignments(); + } + } + NodePtr newLeaf( + new Leaf(std::dynamic_pointer_cast(f0)->constant(), + nrAssignments)); + return newLeaf; + } +#endif + return f; + } else { + // Leaf node, return as is + return node; } } @@ -549,7 +543,7 @@ namespace gtsam { template template 
DecisionTree::DecisionTree( Iterator begin, Iterator end, const L& label) { - root_ = compose(begin, end, label); + root_ = Choice::Unique(compose(begin, end, label)); } /****************************************************************************/ @@ -557,7 +551,7 @@ namespace gtsam { DecisionTree::DecisionTree(const L& label, const DecisionTree& f0, const DecisionTree& f1) { const std::vector functions{f0, f1}; - root_ = compose(functions.begin(), functions.end(), label); + root_ = Choice::Unique(compose(functions.begin(), functions.end(), label)); } /****************************************************************************/ @@ -608,7 +602,7 @@ namespace gtsam { auto choiceOnLabel = std::make_shared(label, end - begin); for (Iterator it = begin; it != end; it++) choiceOnLabel->push_back(it->root_); - return Choice::Unique(choiceOnLabel); + return choiceOnLabel; } else { // Set up a new choice on the highest label auto choiceOnHighestLabel = @@ -737,7 +731,7 @@ namespace gtsam { for (auto&& branch : choice->branches()) { functions.emplace_back(convertFrom(branch, L_of_M, Y_of_X)); } - return LY::compose(functions.begin(), functions.end(), newLabel); + return Choice::Unique(LY::compose(functions.begin(), functions.end(), newLabel)); } /****************************************************************************/ diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index bbce5e8ce5..f148cf1d80 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -15,17 +15,20 @@ * @author Duy-Nguyen Ta */ +#include +#include +#include +#include #include #include -#include -#include #include - -#include +#include using namespace std; using namespace gtsam; +using symbol_shorthand::M; + /* ************************************************************************* */ TEST_UNSAFE(DiscreteFactorGraph, debugScheduler) { DiscreteKey PC(0, 4), ME(1, 4), AI(2, 4), 
A(3, 3); @@ -345,6 +348,67 @@ TEST(DiscreteFactorGraph, markdown) { values[1] = 0; EXPECT_DOUBLES_EQUAL(0.3, graph[0]->operator()(values), 1e-9); } + +TEST(DiscreteFactorGraph, NrAssignments) { + string expected_dfg = R"( +size: 2 +factor 0: f[ (m0,2), (m1,2), (m2,2), ] + Choice(m2) + 0 Choice(m1) + 0 0 Leaf [1] 0 + 0 1 Choice(m0) + 0 1 0 Leaf [1]0.27527634 + 0 1 1 Leaf [1]0.44944733 + 1 Choice(m1) + 1 0 Leaf [1] 0 + 1 1 Choice(m0) + 1 1 0 Leaf [1] 0 + 1 1 1 Leaf [1]0.27527634 +factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf [2] 1 + 0 0 1 Leaf [2]0.015366387 + 0 1 Choice(m1) + 0 1 0 Leaf [2] 1 + 0 1 1 Choice(m0) + 0 1 1 0 Leaf [1] 1 + 0 1 1 1 Leaf [1]0.015365663 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf [2] 1 + 1 0 1 Choice(m0) + 1 0 1 0 Leaf [1]0.0094115739 + 1 0 1 1 Leaf [1]0.0094115652 + 1 1 Choice(m1) + 1 1 0 Leaf [2] 1 + 1 1 1 Choice(m0) + 1 1 1 0 Leaf [1] 1 + 1 1 1 1 Leaf [1]0.009321081 +)"; + + DiscreteKeys d0{{M(2), 2}, {M(1), 2}, {M(0), 2}}; + std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; + AlgebraicDecisionTree dt(d0, p0); + //TODO(Varun) Passing ADT to DiscreteConditional causes nrAssignments to get messed up + // Issue seems to be in DecisionTreeFactor.cpp L104 + DiscreteConditional f0(3, DecisionTreeFactor(d0, dt)); + + DiscreteKeys d1{{M(0), 2}, {M(1), 2}, {M(2), 2}, {M(3), 2}}; + std::vector p1 = { + 1, 1, 1, 1, 0.015366387, 0.0094115739, 1, 1, + 1, 1, 1, 1, 0.015366387, 0.0094115652, 0.015365663, 0.009321081}; + DecisionTreeFactor f1(d1, p1); + DecisionTree dt1(d1, p1); + + DiscreteFactorGraph dfg; + dfg.add(f0); + dfg.add(f1); + + EXPECT(assert_print_equal(expected_dfg, dfg)); +} + /* ************************************************************************* */ int main() { TestResult tr; From a66e270faacb0a86520750cda8a8d838a110136e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 8 Jun 2023 18:29:46 -0400 Subject: [PATCH 14/36] print nrAssignments when 
printing decision trees --- gtsam/discrete/DecisionTree-inl.h | 3 +- gtsam/hybrid/MixtureFactor.h | 2 +- .../tests/testGaussianMixtureFactor.cpp | 4 +- .../tests/testHybridNonlinearFactorGraph.cpp | 38 +++++++++---------- gtsam/hybrid/tests/testMixtureFactor.cpp | 4 +- 5 files changed, 26 insertions(+), 25 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 3e85ba70a3..5c2b735a03 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -93,7 +93,8 @@ namespace gtsam { /// print void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { - std::cout << s << " Leaf " << valueFormatter(constant_) << std::endl; + std::cout << s << " Leaf [" << nrAssignments() << "]" + << valueFormatter(constant_) << std::endl; } /** Write graphviz format to stream `os`. */ diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index df8e0193ad..529c8687b1 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -191,7 +191,7 @@ class MixtureFactor : public HybridFactor { std::cout << "\nMixtureFactor\n"; auto valueFormatter = [](const sharedFactor& v) { if (v) { - return "Nonlinear factor on " + std::to_string(v->size()) + " keys"; + return " Nonlinear factor on " + std::to_string(v->size()) + " keys"; } else { return std::string("nullptr"); } diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 75ba5a0594..5207e9372a 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -108,7 +108,7 @@ TEST(GaussianMixtureFactor, Printing) { std::string expected = R"(Hybrid [x1 x2; 1]{ Choice(1) - 0 Leaf : + 0 Leaf [1]: A[x1] = [ 0; 0 @@ -120,7 +120,7 @@ TEST(GaussianMixtureFactor, Printing) { b = [ 0 0 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x1] = [ 0; 0 diff --git 
a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index af3a23b947..7bcaf1762c 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -492,7 +492,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf : + 0 Leaf [1]: A[x0] = [ -1 ] @@ -502,7 +502,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x0] = [ -1 ] @@ -516,7 +516,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 x2; m1]{ Choice(m1) - 0 Leaf : + 0 Leaf [1]: A[x1] = [ -1 ] @@ -526,7 +526,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x1] = [ -1 ] @@ -550,16 +550,16 @@ factor 4: b = [ -10 ] No noise model factor 5: P( m0 ): - Leaf 0.5 + Leaf [2] 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf 0.33333333 - 0 1 Leaf 0.6 + 0 0 Leaf [1]0.33333333 + 0 1 Leaf [1] 0.6 1 Choice(m0) - 1 0 Leaf 0.66666667 - 1 1 Leaf 0.4 + 1 0 Leaf [1]0.66666667 + 1 1 Leaf [1] 0.4 )"; EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph)); @@ -570,13 +570,13 @@ size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), Choice(m0) - 0 Leaf p(x0 | x1) + 0 Leaf [1] p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] No noise model - 1 Leaf p(x0 | x1) + 1 Leaf [1] p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] @@ -586,26 +586,26 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf p(x1 | x2) + 0 0 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] No noise model - 0 1 Leaf p(x1 | x2) + 0 1 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] No noise model 1 Choice(m0) - 1 0 Leaf p(x1 | x2) + 1 0 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] No noise model - 1 1 Leaf p(x1 | x2) + 1 1 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ 
-0.0990196 ] d = [ -10 ] @@ -615,14 +615,14 @@ conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf p(x2) + 0 0 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.1489 ] mean: 1 elements x2: -1.0099 No noise model - 0 1 Leaf p(x2) + 0 1 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.1479 ] mean: 1 elements @@ -630,14 +630,14 @@ conditional 2: Hybrid P( x2 | m0 m1) No noise model 1 Choice(m0) - 1 0 Leaf p(x2) + 1 0 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.0504 ] mean: 1 elements x2: -1.0001 No noise model - 1 1 Leaf p(x2) + 1 1 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.0494 ] mean: 1 elements diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 67a7fd8ae1..03fdccff26 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -63,8 +63,8 @@ TEST(MixtureFactor, Printing) { R"(Hybrid [x1 x2; 1] MixtureFactor Choice(1) - 0 Leaf Nonlinear factor on 2 keys - 1 Leaf Nonlinear factor on 2 keys + 0 Leaf [1] Nonlinear factor on 2 keys + 1 Leaf [1] Nonlinear factor on 2 keys )"; EXPECT(assert_print_equal(expected, mixtureFactor)); } From 76568f2d7395d46af93229ae3fe97470416ab992 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 9 Jun 2023 10:18:36 -0400 Subject: [PATCH 15/36] formatting --- gtsam/hybrid/HybridBayesTree.cpp | 3 +-- gtsam/hybrid/HybridGaussianISAM.cpp | 7 +++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index b252e613e5..dc1c875e17 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -173,8 +173,7 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { /* ************************************************************************* */ void HybridBayesTree::prune(const size_t maxNrLeaves) { - auto decisionTree = - this->roots_.at(0)->conditional()->asDiscrete(); + auto decisionTree = 
this->roots_.at(0)->conditional()->asDiscrete(); DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves); decisionTree->root_ = prunedDecisionTree.root_; diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp index 0dd5fa38b5..6f8b7b9ffe 100644 --- a/gtsam/hybrid/HybridGaussianISAM.cpp +++ b/gtsam/hybrid/HybridGaussianISAM.cpp @@ -70,8 +70,7 @@ Ordering HybridGaussianISAM::GetOrdering( /* ************************************************************************* */ void HybridGaussianISAM::updateInternal( const HybridGaussianFactorGraph& newFactors, - HybridBayesTree::Cliques* orphans, - const std::optional& maxNrLeaves, + HybridBayesTree::Cliques* orphans, const std::optional& maxNrLeaves, const std::optional& ordering, const HybridBayesTree::Eliminate& function) { // Remove the contaminated part of the Bayes tree @@ -101,8 +100,8 @@ void HybridGaussianISAM::updateInternal( } // eliminate all factors (top, added, orphans) into a new Bayes tree - HybridBayesTree::shared_ptr bayesTree = - factors.eliminateMultifrontal(elimination_ordering, function, std::cref(index)); + HybridBayesTree::shared_ptr bayesTree = factors.eliminateMultifrontal( + elimination_ordering, function, std::cref(index)); if (maxNrLeaves) { bayesTree->prune(*maxNrLeaves); From 29c1816a81b245a4f599db7a47caa5a80db0f5f7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 9 Jun 2023 20:13:06 -0400 Subject: [PATCH 16/36] change to GTSAM_DT_MERGING and expose via CMake --- cmake/HandleGeneralOptions.cmake | 1 + gtsam/config.h.in | 3 +++ gtsam/discrete/DecisionTree-inl.h | 2 +- gtsam/discrete/tests/testAlgebraicDecisionTree.cpp | 1 - gtsam/discrete/tests/testDecisionTree.cpp | 1 - 5 files changed, 5 insertions(+), 3 deletions(-) diff --git a/cmake/HandleGeneralOptions.cmake b/cmake/HandleGeneralOptions.cmake index 9ebb073311..c5fd9898ce 100644 --- a/cmake/HandleGeneralOptions.cmake +++ b/cmake/HandleGeneralOptions.cmake @@ -19,6 +19,7 @@ 
option(GTSAM_FORCE_STATIC_LIB "Force gtsam to be a static library, option(GTSAM_USE_QUATERNIONS "Enable/Disable using an internal Quaternion representation for rotations instead of rotation matrices. If enable, Rot3::EXPMAP is enforced by default." OFF) option(GTSAM_POSE3_EXPMAP "Enable/Disable using Pose3::EXPMAP as the default mode. If disabled, Pose3::FIRST_ORDER will be used." ON) option(GTSAM_ROT3_EXPMAP "Ignore if GTSAM_USE_QUATERNIONS is OFF (Rot3::EXPMAP by default). Otherwise, enable Rot3::EXPMAP, or if disabled, use Rot3::CAYLEY." ON) +option(GTSAM_DT_MERGING "Enable/Disable merging of equal leaf nodes in DecisionTrees. This leads to significant speed up and memory savings." ON) option(GTSAM_ENABLE_CONSISTENCY_CHECKS "Enable/Disable expensive consistency checks" OFF) option(GTSAM_ENABLE_MEMORY_SANITIZER "Enable/Disable memory sanitizer" OFF) option(GTSAM_WITH_TBB "Use Intel Threaded Building Blocks (TBB) if available" ON) diff --git a/gtsam/config.h.in b/gtsam/config.h.in index 7f8936d1e3..7c08d36bf8 100644 --- a/gtsam/config.h.in +++ b/gtsam/config.h.in @@ -39,6 +39,9 @@ #cmakedefine GTSAM_ROT3_EXPMAP #endif +// Whether to enable merging of equal leaf nodes in the Discrete Decision Tree. 
+#cmakedefine GTSAM_DT_MERGING + // Whether we are using TBB (if TBB was found and GTSAM_WITH_TBB is enabled in CMake) #cmakedefine GTSAM_USE_TBB diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 5c2b735a03..2f36007a9a 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -212,7 +212,7 @@ namespace gtsam { f->push_back(Unique(branch)); } -#ifndef GTSAM_DT_NO_MERGING +#ifdef GTSAM_DT_MERGING // If all the branches are the same, we can merge them into one if (f->allSame_) { assert(f->branches().size() > 0); diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp index d7e7a071c1..55f5b61d7c 100644 --- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp +++ b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp @@ -20,7 +20,6 @@ #include // make sure we have traits #include // headers first to make sure no missing headers -//#define GTSAM_DT_NO_MERGING #include #include // for convert only #define DISABLE_TIMING diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 0559a782b7..3369455030 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -18,7 +18,6 @@ */ // #define DT_DEBUG_MEMORY -// #define GTSAM_DT_NO_MERGING #define DISABLE_DOT #include #include From 895998268694d16fa61224481adab9fb3123cc04 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 14 Jun 2023 15:23:14 -0400 Subject: [PATCH 17/36] remove extra calls to Unique --- gtsam/discrete/DecisionTree-inl.h | 5 +++-- gtsam/discrete/tests/testDiscreteFactorGraph.cpp | 1 - 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 2f36007a9a..8dc19ea218 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -544,7 +544,7 @@ namespace gtsam { template template 
DecisionTree::DecisionTree( Iterator begin, Iterator end, const L& label) { - root_ = Choice::Unique(compose(begin, end, label)); + root_ = compose(begin, end, label); } /****************************************************************************/ @@ -552,7 +552,7 @@ namespace gtsam { DecisionTree::DecisionTree(const L& label, const DecisionTree& f0, const DecisionTree& f1) { const std::vector functions{f0, f1}; - root_ = Choice::Unique(compose(functions.begin(), functions.end(), label)); + root_ = compose(functions.begin(), functions.end(), label); } /****************************************************************************/ @@ -603,6 +603,7 @@ namespace gtsam { auto choiceOnLabel = std::make_shared(label, end - begin); for (Iterator it = begin; it != end; it++) choiceOnLabel->push_back(it->root_); + // If no reordering, no need to call Choice::Unique return choiceOnLabel; } else { // Set up a new choice on the highest label diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index f148cf1d80..6752dbd4a9 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -400,7 +400,6 @@ factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] 1, 1, 1, 1, 0.015366387, 0.0094115739, 1, 1, 1, 1, 1, 1, 0.015366387, 0.0094115652, 0.015365663, 0.009321081}; DecisionTreeFactor f1(d1, p1); - DecisionTree dt1(d1, p1); DiscreteFactorGraph dfg; dfg.add(f0); From b37fc3f53a61a65d22ebc2dd57873e167aa7497d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Jun 2023 14:48:42 -0400 Subject: [PATCH 18/36] update DecisionTree::nrAssignments docstring --- gtsam/discrete/DecisionTree.h | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index a5d890ab55..9a8eac65e7 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -308,10 +308,15 @@ namespace 
gtsam { size_t nrLeaves() const; /** - * @brief Return the number of total leaf assignments. - * This includes counts removed from implicit pruning hence, - * it will always be >= nrLeaves(). - * + * @brief This is a convenience function which returns the total number of + * leaf assignments in the decision tree. + * This function is not used for any major operations within the discrete + * factor graph framework. + * + * Leaf assignments represent the cardinality of each leaf node, e.g. in a + * binary tree each leaf has 2 assignments. This includes counts removed + * from implicit pruning, hence it will always be >= nrLeaves(). + * * E.g. we have a decision tree as below, where each node has 2 branches: * * Choice(m1) @@ -322,9 +327,11 @@ namespace gtsam { * 1 0 Leaf 1.0 * 1 1 Leaf 2.0 * - * In the unpruned form, the tree will have 4 assignments, 2 for each key, and 4 leaves. + * In the unpruned form, the tree will have 4 assignments, 2 for each key, + * and 4 leaves. * - * In the pruned form, the number of assignments is still 4 but the number of leaves is now 3, as below: + * In the pruned form, the number of assignments is still 4 but the number + * of leaves is now 3, as below: * * Choice(m1) * 0 Leaf 0.0 From b24f20afe1b45cf75d809404c96a115ca4bfe565 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Jun 2023 18:04:53 -0400 Subject: [PATCH 19/36] fix tests to work when GTSAM_DT_MERGING=OFF --- .../tests/testAlgebraicDecisionTree.cpp | 20 +++++ gtsam/discrete/tests/testDecisionTree.cpp | 59 ++++++++++++- .../tests/testDiscreteFactorGraph.cpp | 54 ++++++++++++ gtsam/hybrid/tests/testHybridBayesNet.cpp | 4 + .../tests/testHybridNonlinearFactorGraph.cpp | 87 +++++++++++++++++++ 5 files changed, 223 insertions(+), 1 deletion(-) diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp index 55f5b61d7c..19f4686c27 100644 --- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp +++ 
b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp @@ -178,7 +178,11 @@ TEST(ADT, joint) { dot(joint, "Asia-ASTLBEX"); joint = apply(joint, pD, &mul); dot(joint, "Asia-ASTLBEXD"); +#ifdef GTSAM_DT_MERGING EXPECT_LONGS_EQUAL(346, muls); +#else + EXPECT_LONGS_EQUAL(508, muls); +#endif gttoc_(asiaJoint); tictoc_getNode(asiaJointNode, asiaJoint); elapsed = asiaJointNode->secs() + asiaJointNode->wall(); @@ -239,7 +243,11 @@ TEST(ADT, inference) { dot(joint, "Joint-Product-ASTLBEX"); joint = apply(joint, pD, &mul); dot(joint, "Joint-Product-ASTLBEXD"); +#ifdef GTSAM_DT_MERGING EXPECT_LONGS_EQUAL(370, (long)muls); // different ordering +#else + EXPECT_LONGS_EQUAL(508, (long)muls); // different ordering +#endif gttoc_(asiaProd); tictoc_getNode(asiaProdNode, asiaProd); elapsed = asiaProdNode->secs() + asiaProdNode->wall(); @@ -257,7 +265,11 @@ TEST(ADT, inference) { dot(marginal, "Joint-Sum-ADBLE"); marginal = marginal.combine(E, &add_); dot(marginal, "Joint-Sum-ADBL"); +#ifdef GTSAM_DT_MERGING EXPECT_LONGS_EQUAL(161, (long)adds); +#else + EXPECT_LONGS_EQUAL(240, (long)adds); +#endif gttoc_(asiaSum); tictoc_getNode(asiaSumNode, asiaSum); elapsed = asiaSumNode->secs() + asiaSumNode->wall(); @@ -295,7 +307,11 @@ TEST(ADT, factor_graph) { fg = apply(fg, pX, &mul); fg = apply(fg, pD, &mul); dot(fg, "FactorGraph"); +#ifdef GTSAM_DT_MERGING EXPECT_LONGS_EQUAL(158, (long)muls); +#else + EXPECT_LONGS_EQUAL(188, (long)muls); +#endif gttoc_(asiaFG); tictoc_getNode(asiaFGNode, asiaFG); elapsed = asiaFGNode->secs() + asiaFGNode->wall(); @@ -314,7 +330,11 @@ TEST(ADT, factor_graph) { dot(fg, "Marginalized-3E"); fg = fg.combine(L, &add_); dot(fg, "Marginalized-2L"); +#ifdef GTSAM_DT_MERGING LONGS_EQUAL(49, adds); +#else + LONGS_EQUAL(62, adds); +#endif gttoc_(marg); tictoc_getNode(margNode, marg); elapsed = margNode->secs() + margNode->wall(); diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 3369455030..653360fb7c 100644 --- 
a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -191,7 +191,11 @@ TEST(DecisionTree, example) { // Test choose 0 DT actual0 = notba.choose(A, 0); +#ifdef GTSAM_DT_MERGING EXPECT(assert_equal(DT(0.0), actual0)); +#else + // EXPECT(assert_equal(DT({0.0, 0.0}), actual0)); +#endif DOT(actual0); // Test choose 1 @@ -332,9 +336,11 @@ TEST(DecisionTree, NrAssignments) { EXPECT_LONGS_EQUAL(8, tree.nrAssignments()); +#ifdef GTSAM_DT_MERGING EXPECT(tree.root_->isLeaf()); auto leaf = std::dynamic_pointer_cast(tree.root_); EXPECT_LONGS_EQUAL(8, leaf->nrAssignments()); +#endif DT tree2({C, B, A}, "1 1 1 2 3 4 5 5"); /* The tree is @@ -357,6 +363,8 @@ TEST(DecisionTree, NrAssignments) { CHECK(root); auto choice0 = std::dynamic_pointer_cast(root->branches()[0]); CHECK(choice0); + +#ifdef GTSAM_DT_MERGING EXPECT(choice0->branches()[0]->isLeaf()); auto choice00 = std::dynamic_pointer_cast(choice0->branches()[0]); CHECK(choice00); @@ -370,6 +378,7 @@ TEST(DecisionTree, NrAssignments) { CHECK(choice11); EXPECT(choice11->isLeaf()); EXPECT_LONGS_EQUAL(2, choice11->nrAssignments()); +#endif } /* ************************************************************************** */ @@ -411,27 +420,61 @@ TEST(DecisionTree, VisitWithPruned) { }; tree.visitWith(func); +#ifdef GTSAM_DT_MERGING EXPECT_LONGS_EQUAL(6, choices.size()); +#else + EXPECT_LONGS_EQUAL(8, choices.size()); +#endif Assignment expectedAssignment; +#ifdef GTSAM_DT_MERGING expectedAssignment = {{"B", 0}, {"C", 0}}; EXPECT(expectedAssignment == choices.at(0)); +#else + expectedAssignment = {{"A", 0}, {"B", 0}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(0)); +#endif +#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 0}}; EXPECT(expectedAssignment == choices.at(1)); +#else + expectedAssignment = {{"A", 1}, {"B", 0}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(1)); +#endif +#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 0}}; 
EXPECT(expectedAssignment == choices.at(2)); +#else + expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(2)); +#endif +#ifdef GTSAM_DT_MERGING expectedAssignment = {{"B", 0}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(3)); +#else + expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(3)); +#endif +#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(4)); +#else + expectedAssignment = {{"A", 0}, {"B", 0}, {"C", 1}}; + EXPECT(expectedAssignment == choices.at(4)); +#endif +#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(5)); +#else + expectedAssignment = {{"A", 1}, {"B", 0}, {"C", 1}}; + EXPECT(expectedAssignment == choices.at(5)); +#endif } /* ************************************************************************** */ @@ -442,7 +485,11 @@ TEST(DecisionTree, fold) { DT tree(B, DT(A, 1, 1), DT(A, 2, 3)); auto add = [](const int& y, double x) { return y + x; }; double sum = tree.fold(add, 0.0); - EXPECT_DOUBLES_EQUAL(6.0, sum, 1e-9); // Note, not 7, due to pruning! +#ifdef GTSAM_DT_MERGING + EXPECT_DOUBLES_EQUAL(6.0, sum, 1e-9); // Note, not 7, due to merging! +#else + EXPECT_DOUBLES_EQUAL(7.0, sum, 1e-9); +#endif } /* ************************************************************************** */ @@ -494,9 +541,14 @@ TEST(DecisionTree, threshold) { auto threshold = [](int value) { return value < 5 ? 0 : value; }; DT thresholded(tree, threshold); +#ifdef GTSAM_DT_MERGING // Check number of leaves equal to zero now = 2 // Note: it is 2, because the pruned branches are counted as 1! 
EXPECT_LONGS_EQUAL(2, thresholded.fold(count, 0)); +#else + // if GTSAM_DT_MERGING is disabled, the count will be larger + EXPECT_LONGS_EQUAL(5, thresholded.fold(count, 0)); +#endif } /* ************************************************************************** */ @@ -532,8 +584,13 @@ TEST(DecisionTree, ApplyWithAssignment) { }; DT prunedTree2 = prunedTree.apply(counter); +#ifdef GTSAM_DT_MERGING // Check if apply doesn't enumerate all leaves. EXPECT_LONGS_EQUAL(5, count); +#else + // if GTSAM_DT_MERGING is disabled, the count will be full + EXPECT_LONGS_EQUAL(8, count); +#endif } /* ************************************************************************** */ diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 6752dbd4a9..33fa933d26 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -350,6 +350,7 @@ TEST(DiscreteFactorGraph, markdown) { } TEST(DiscreteFactorGraph, NrAssignments) { +#ifdef GTSAM_DT_MERGING string expected_dfg = R"( size: 2 factor 0: f[ (m0,2), (m1,2), (m2,2), ] @@ -387,6 +388,59 @@ factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] 1 1 1 0 Leaf [1] 1 1 1 1 1 Leaf [1]0.009321081 )"; +#else + string expected_dfg = R"( +size: 2 +factor 0: f[ (m0,2), (m1,2), (m2,2), ] + Choice(m2) + 0 Choice(m1) + 0 0 Choice(m0) + 0 0 0 Leaf [1] 0 + 0 0 1 Leaf [1] 0 + 0 1 Choice(m0) + 0 1 0 Leaf [1]0.27527634 + 0 1 1 Leaf [1]0.44944733 + 1 Choice(m1) + 1 0 Choice(m0) + 1 0 0 Leaf [1] 0 + 1 0 1 Leaf [1] 0 + 1 1 Choice(m0) + 1 1 0 Leaf [1] 0 + 1 1 1 Leaf [1]0.27527634 +factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Choice(m0) + 0 0 0 0 Leaf [1] 1 + 0 0 0 1 Leaf [1] 1 + 0 0 1 Choice(m0) + 0 0 1 0 Leaf [1]0.015366387 + 0 0 1 1 Leaf [1]0.015366387 + 0 1 Choice(m1) + 0 1 0 Choice(m0) + 0 1 0 0 Leaf [1] 1 + 0 1 0 1 Leaf [1] 1 + 0 1 1 Choice(m0) + 0 1 1 0 Leaf [1] 1 + 0 1 1 1 Leaf [1]0.015365663 
+ 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Choice(m0) + 1 0 0 0 Leaf [1] 1 + 1 0 0 1 Leaf [1] 1 + 1 0 1 Choice(m0) + 1 0 1 0 Leaf [1]0.0094115739 + 1 0 1 1 Leaf [1]0.0094115652 + 1 1 Choice(m1) + 1 1 0 Choice(m0) + 1 1 0 0 Leaf [1] 1 + 1 1 0 1 Leaf [1] 1 + 1 1 1 Choice(m0) + 1 1 1 0 Leaf [1] 1 + 1 1 1 1 Leaf [1]0.009321081 +)"; +#endif DiscreteKeys d0{{M(2), 2}, {M(1), 2}, {M(0), 2}}; std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index f25675a552..d2f39c6edd 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -288,8 +288,12 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { std::make_shared( discreteConditionals->prune(maxNrLeaves)); +#ifdef GTSAM_DT_MERGING EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, prunedDecisionTree->nrLeaves()); +#else + EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree->nrLeaves()); +#endif auto original_discrete_conditionals = *(posterior->at(4)->asDiscrete()); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 7bcaf1762c..0a621c42d2 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -481,6 +481,7 @@ TEST(HybridFactorGraph, Printing) { const auto [hybridBayesNet, remainingFactorGraph] = linearizedFactorGraph.eliminatePartialSequential(ordering); +#ifdef GTSAM_DT_MERGING string expected_hybridFactorGraph = R"( size: 7 factor 0: @@ -562,6 +563,92 @@ factor 6: P( m1 | m0 ): 1 1 Leaf [1] 0.4 )"; +#else +string expected_hybridFactorGraph = R"( +size: 7 +factor 0: + A[x0] = [ + 10 +] + b = [ -10 ] + No noise model +factor 1: +Hybrid [x0 x1; m0]{ + Choice(m0) + 0 Leaf [1]: + A[x0] = [ + -1 +] + A[x1] = [ + 1 +] + b = [ -1 ] + No noise model + + 1 Leaf [1]: + A[x0] = [ + -1 +] + A[x1] = [ + 1 +] + b = [ -0 
] + No noise model + +} +factor 2: +Hybrid [x1 x2; m1]{ + Choice(m1) + 0 Leaf [1]: + A[x1] = [ + -1 +] + A[x2] = [ + 1 +] + b = [ -1 ] + No noise model + + 1 Leaf [1]: + A[x1] = [ + -1 +] + A[x2] = [ + 1 +] + b = [ -0 ] + No noise model + +} +factor 3: + A[x1] = [ + 10 +] + b = [ -10 ] + No noise model +factor 4: + A[x2] = [ + 10 +] + b = [ -10 ] + No noise model +factor 5: P( m0 ): + Choice(m0) + 0 Leaf [1] 0.5 + 1 Leaf [1] 0.5 + +factor 6: P( m1 | m0 ): + Choice(m1) + 0 Choice(m0) + 0 0 Leaf [1]0.33333333 + 0 1 Leaf [1] 0.6 + 1 Choice(m0) + 1 0 Leaf [1]0.66666667 + 1 1 Leaf [1] 0.4 + +)"; +#endif + EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph)); // Expected output for hybridBayesNet. From 8ffddc4077fd2522c4493016900c4a6425c49d2d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Jun 2023 18:05:05 -0400 Subject: [PATCH 20/36] print GTSAM_DT_MERGING cmake config --- cmake/HandlePrintConfiguration.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/cmake/HandlePrintConfiguration.cmake b/cmake/HandlePrintConfiguration.cmake index c5c3920cbb..42fae90f77 100644 --- a/cmake/HandlePrintConfiguration.cmake +++ b/cmake/HandlePrintConfiguration.cmake @@ -90,6 +90,7 @@ print_enabled_config(${GTSAM_ENABLE_CONSISTENCY_CHECKS} "Runtime consistency c print_enabled_config(${GTSAM_ENABLE_MEMORY_SANITIZER} "Build with Memory Sanitizer ") print_enabled_config(${GTSAM_ROT3_EXPMAP} "Rot3 retract is full ExpMap ") print_enabled_config(${GTSAM_POSE3_EXPMAP} "Pose3 retract is full ExpMap ") +print_enabled_config(${GTSAM_DT_MERGING} "Enable branch merging in DecisionTree") print_enabled_config(${GTSAM_ALLOW_DEPRECATED_SINCE_V43} "Allow features deprecated in GTSAM 4.3") print_enabled_config(${GTSAM_SUPPORT_NESTED_DISSECTION} "Metis-based Nested Dissection ") print_enabled_config(${GTSAM_TANGENT_PREINTEGRATION} "Use tangent-space preintegration") From e5fea0da5204a1d9e48226e9eef4175a77e3d8fd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 
26 Jun 2023 18:16:43 -0400 Subject: [PATCH 21/36] update docstring --- gtsam/discrete/DecisionTree-inl.h | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 8dc19ea218..b65cc6bcf8 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -199,11 +199,26 @@ namespace gtsam { #endif } - /// If all branches of a choice node f are the same, just return a branch. + /** + * @brief Merge branches with equal leaf values for every choice node in a + * decision tree. If all branches are the same (i.e. have the same leaf + * value), replace the choice node with the equivalent leaf node. + * + * This function applies the branch merging (if enabled) recursively on the + * decision tree represented by the root node passed in as the argument. It + * recurses to the leaf nodes and merges branches with equal leaf values in + * a bottom-up fashion. + * + * Thus, if all branches of a choice node `f` are the same, + * just return a single branch at each recursion step. + * + * @param node The root node of the decision tree. + * @return NodePtr + */ static NodePtr Unique(const NodePtr& node) { if (auto choice = std::dynamic_pointer_cast(node)) { // Choice node, we recurse! 
- // Make non-const copy + // Make non-const copy so we can update auto f = std::make_shared(choice->label(), choice->nrChoices()); // Iterate over all the branches From 9b7f4b3f54b4be1e913fd44cc31d4fd437836d27 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Jun 2023 10:12:13 -0400 Subject: [PATCH 22/36] fix test case --- gtsam/discrete/DecisionTree-inl.h | 10 +++++----- gtsam/discrete/tests/testDiscreteFactorGraph.cpp | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index b65cc6bcf8..156177d039 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -137,7 +137,9 @@ namespace gtsam { // Applying binary operator to two leaves results in a leaf NodePtr apply_g_op_fL(const Leaf& fL, const Binary& op) const override { // fL op gL - NodePtr h(new Leaf(op(fL.constant_, constant_), nrAssignments_)); + // TODO(Varun) nrAssignments setting is not correct. + // Depending on f and g, the nrAssignments can be different. This is a bug! 
+ NodePtr h(new Leaf(op(fL.constant_, constant_), fL.nrAssignments())); return h; } @@ -496,13 +498,11 @@ namespace gtsam { // DecisionTree /****************************************************************************/ template - DecisionTree::DecisionTree() { - } + DecisionTree::DecisionTree() {} template DecisionTree::DecisionTree(const NodePtr& root) : - root_(root) { - } + root_(root) {} /****************************************************************************/ template diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 33fa933d26..6e86215957 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -356,14 +356,14 @@ size: 2 factor 0: f[ (m0,2), (m1,2), (m2,2), ] Choice(m2) 0 Choice(m1) - 0 0 Leaf [1] 0 + 0 0 Leaf [2] 0 0 1 Choice(m0) 0 1 0 Leaf [1]0.27527634 - 0 1 1 Leaf [1]0.44944733 + 0 1 1 Leaf [1] 0 1 Choice(m1) - 1 0 Leaf [1] 0 + 1 0 Leaf [2] 0 1 1 Choice(m0) - 1 1 0 Leaf [1] 0 + 1 1 0 Leaf [1]0.44944733 1 1 1 Leaf [1]0.27527634 factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] Choice(m3) @@ -442,7 +442,7 @@ factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] )"; #endif - DiscreteKeys d0{{M(2), 2}, {M(1), 2}, {M(0), 2}}; + DiscreteKeys d0{{M(0), 2}, {M(1), 2}, {M(2), 2}}; std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; AlgebraicDecisionTree dt(d0, p0); //TODO(Varun) Passing ADT to DiscreteConditional causes nrAssignments to get messed up From 8c38e45c83c6bbeb2ef5e7c7d73807b2eed6831f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Jun 2023 12:19:41 -0400 Subject: [PATCH 23/36] enumerate all assignments for computing probabilities to prune --- gtsam/discrete/DecisionTreeFactor.cpp | 11 ++-- .../tests/testGaussianMixtureFactor.cpp | 4 +- .../tests/testHybridNonlinearFactorGraph.cpp | 58 +++++++++---------- gtsam/hybrid/tests/testMixtureFactor.cpp | 4 +- 4 files changed, 39 insertions(+), 38 
deletions(-) diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index 4aca10a284..6f20907e14 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -306,11 +306,12 @@ namespace gtsam { // Get the probabilities in the decision tree so we can threshold. std::vector probabilities; - this->visitLeaf([&](const Leaf& leaf) { - const size_t nrAssignments = leaf.nrAssignments(); - double prob = leaf.constant(); - probabilities.insert(probabilities.end(), nrAssignments, prob); - }); + // NOTE(Varun) this is potentially slow due to the cartesian product + auto allValues = DiscreteValues::CartesianProduct(this->discreteKeys()); + for (auto&& val : allValues) { + double prob = (*this)(val); + probabilities.push_back(prob); + } // The number of probabilities can be lower than max_leaves if (probabilities.size() <= N) { diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 5207e9372a..75ba5a0594 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -108,7 +108,7 @@ TEST(GaussianMixtureFactor, Printing) { std::string expected = R"(Hybrid [x1 x2; 1]{ Choice(1) - 0 Leaf [1]: + 0 Leaf : A[x1] = [ 0; 0 @@ -120,7 +120,7 @@ TEST(GaussianMixtureFactor, Printing) { b = [ 0 0 ] No noise model - 1 Leaf [1]: + 1 Leaf : A[x1] = [ 0; 0 diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 0a621c42d2..29136c62ad 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -493,7 +493,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf [1]: + 0 Leaf : A[x0] = [ -1 ] @@ -503,7 +503,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf [1]: + 1 Leaf : A[x0] = [ -1 ] @@ -517,7 +517,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 
x2; m1]{ Choice(m1) - 0 Leaf [1]: + 0 Leaf : A[x1] = [ -1 ] @@ -527,7 +527,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf [1]: + 1 Leaf : A[x1] = [ -1 ] @@ -551,16 +551,16 @@ factor 4: b = [ -10 ] No noise model factor 5: P( m0 ): - Leaf [2] 0.5 + Leaf 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf [1]0.33333333 - 0 1 Leaf [1] 0.6 + 0 0 Leaf 0.33333333 + 0 1 Leaf 0.6 1 Choice(m0) - 1 0 Leaf [1]0.66666667 - 1 1 Leaf [1] 0.4 + 1 0 Leaf 0.66666667 + 1 1 Leaf 0.4 )"; #else @@ -575,7 +575,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf [1]: + 0 Leaf : A[x0] = [ -1 ] @@ -585,7 +585,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf [1]: + 1 Leaf : A[x0] = [ -1 ] @@ -599,7 +599,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 x2; m1]{ Choice(m1) - 0 Leaf [1]: + 0 Leaf : A[x1] = [ -1 ] @@ -609,7 +609,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf [1]: + 1 Leaf : A[x1] = [ -1 ] @@ -634,17 +634,17 @@ factor 4: No noise model factor 5: P( m0 ): Choice(m0) - 0 Leaf [1] 0.5 - 1 Leaf [1] 0.5 + 0 Leaf 0.5 + 1 Leaf 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf [1]0.33333333 - 0 1 Leaf [1] 0.6 + 0 0 Leaf 0.33333333 + 0 1 Leaf 0.6 1 Choice(m0) - 1 0 Leaf [1]0.66666667 - 1 1 Leaf [1] 0.4 + 1 0 Leaf 0.66666667 + 1 1 Leaf 0.4 )"; #endif @@ -657,13 +657,13 @@ size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), Choice(m0) - 0 Leaf [1] p(x0 | x1) + 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] No noise model - 1 Leaf [1] p(x0 | x1) + 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] @@ -673,26 +673,26 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf [1] p(x1 | x2) + 0 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] No noise model - 0 1 Leaf [1] p(x1 | x2) + 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] No noise model 1 
Choice(m0) - 1 0 Leaf [1] p(x1 | x2) + 1 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] No noise model - 1 1 Leaf [1] p(x1 | x2) + 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] @@ -702,14 +702,14 @@ conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf [1] p(x2) + 0 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1489 ] mean: 1 elements x2: -1.0099 No noise model - 0 1 Leaf [1] p(x2) + 0 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1479 ] mean: 1 elements @@ -717,14 +717,14 @@ conditional 2: Hybrid P( x2 | m0 m1) No noise model 1 Choice(m0) - 1 0 Leaf [1] p(x2) + 1 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0504 ] mean: 1 elements x2: -1.0001 No noise model - 1 1 Leaf [1] p(x2) + 1 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0494 ] mean: 1 elements diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 03fdccff26..0827e4eedb 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -63,8 +63,8 @@ TEST(MixtureFactor, Printing) { R"(Hybrid [x1 x2; 1] MixtureFactor Choice(1) - 0 Leaf [1] Nonlinear factor on 2 keys - 1 Leaf [1] Nonlinear factor on 2 keys + 0 Leaf Nonlinear factor on 2 keys + 1 Leaf Nonlinear factor on 2 keys )"; EXPECT(assert_print_equal(expected, mixtureFactor)); } From 647d3c0744171ffa78f585a39a88bfa4be4d2002 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Jun 2023 13:26:07 -0400 Subject: [PATCH 24/36] remove nrAssignments from the DecisionTree --- gtsam/discrete/DecisionTree-inl.h | 47 ++------ gtsam/discrete/DecisionTree.h | 36 ------ gtsam/discrete/tests/testDecisionTree.cpp | 85 ------------- .../tests/testDiscreteFactorGraph.cpp | 113 ------------------ 4 files changed, 8 insertions(+), 273 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 156177d039..10532284cf 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ 
b/gtsam/discrete/DecisionTree-inl.h @@ -53,26 +53,17 @@ namespace gtsam { /** constant stored in this leaf */ Y constant_; - /** The number of assignments contained within this leaf. - * Particularly useful when leaves have been pruned. - */ - size_t nrAssignments_; - /// Default constructor for serialization. Leaf() {} /// Constructor from constant - Leaf(const Y& constant, size_t nrAssignments = 1) - : constant_(constant), nrAssignments_(nrAssignments) {} + Leaf(const Y& constant) : constant_(constant) {} /// Return the constant const Y& constant() const { return constant_; } - /// Return the number of assignments contained within this leaf. - size_t nrAssignments() const { return nrAssignments_; } - /// Leaf-Leaf equality bool sameLeaf(const Leaf& q) const override { return constant_ == q.constant_; @@ -93,8 +84,7 @@ namespace gtsam { /// print void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { - std::cout << s << " Leaf [" << nrAssignments() << "]" - << valueFormatter(constant_) << std::endl; + std::cout << s << " Leaf " << valueFormatter(constant_) << std::endl; } /** Write graphviz format to stream `os`. */ @@ -114,14 +104,14 @@ namespace gtsam { /** apply unary operator */ NodePtr apply(const Unary& op) const override { - NodePtr f(new Leaf(op(constant_), nrAssignments_)); + NodePtr f(new Leaf(op(constant_))); return f; } /// Apply unary operator with assignment NodePtr apply(const UnaryAssignment& op, const Assignment& assignment) const override { - NodePtr f(new Leaf(op(assignment, constant_), nrAssignments_)); + NodePtr f(new Leaf(op(assignment, constant_))); return f; } @@ -137,9 +127,7 @@ namespace gtsam { // Applying binary operator to two leaves results in a leaf NodePtr apply_g_op_fL(const Leaf& fL, const Binary& op) const override { // fL op gL - // TODO(Varun) nrAssignments setting is not correct. - // Depending on f and g, the nrAssignments can be different. This is a bug! 
- NodePtr h(new Leaf(op(fL.constant_, constant_), fL.nrAssignments())); + NodePtr h(new Leaf(op(fL.constant_, constant_))); return h; } @@ -150,7 +138,7 @@ namespace gtsam { /** choose a branch, create new memory ! */ NodePtr choose(const L& label, size_t index) const override { - return NodePtr(new Leaf(constant(), nrAssignments())); + return NodePtr(new Leaf(constant())); } bool isLeaf() const override { return true; } @@ -165,7 +153,6 @@ namespace gtsam { void serialize(ARCHIVE& ar, const unsigned int /*version*/) { ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); ar& BOOST_SERIALIZATION_NVP(constant_); - ar& BOOST_SERIALIZATION_NVP(nrAssignments_); } #endif }; // Leaf @@ -235,16 +222,8 @@ namespace gtsam { assert(f->branches().size() > 0); NodePtr f0 = f->branches_[0]; - // Compute total number of assignments - size_t nrAssignments = 0; - for (auto branch : f->branches()) { - if (auto leaf = std::dynamic_pointer_cast(branch)) { - nrAssignments += leaf->nrAssignments(); - } - } NodePtr newLeaf( - new Leaf(std::dynamic_pointer_cast(f0)->constant(), - nrAssignments)); + new Leaf(std::dynamic_pointer_cast(f0)->constant())); return newLeaf; } #endif @@ -730,7 +709,7 @@ namespace gtsam { // If leaf, apply unary conversion "op" and create a unique leaf. 
using MXLeaf = typename DecisionTree::Leaf; if (auto leaf = std::dynamic_pointer_cast(f)) { - return NodePtr(new Leaf(Y_of_X(leaf->constant()), leaf->nrAssignments())); + return NodePtr(new Leaf(Y_of_X(leaf->constant()))); } // Check if Choice @@ -877,16 +856,6 @@ namespace gtsam { return total; } - /****************************************************************************/ - template - size_t DecisionTree::nrAssignments() const { - size_t n = 0; - this->visitLeaf([&n](const DecisionTree::Leaf& leaf) { - n += leaf.nrAssignments(); - }); - return n; - } - /****************************************************************************/ // fold is just done with a visit template diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 9a8eac65e7..733ab1ad1f 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -307,42 +307,6 @@ namespace gtsam { /// Return the number of leaves in the tree. size_t nrLeaves() const; - /** - * @brief This is a convenience function which returns the total number of - * leaf assignments in the decision tree. - * This function is not used for anymajor operations within the discrete - * factor graph framework. - * - * Leaf assignments represent the cardinality of each leaf node, e.g. in a - * binary tree each leaf has 2 assignments. This includes counts removed - * from implicit pruning hence, it will always be >= nrLeaves(). - * - * E.g. we have a decision tree as below, where each node has 2 branches: - * - * Choice(m1) - * 0 Choice(m0) - * 0 0 Leaf 0.0 - * 0 1 Leaf 0.0 - * 1 Choice(m0) - * 1 0 Leaf 1.0 - * 1 1 Leaf 2.0 - * - * In the unpruned form, the tree will have 4 assignments, 2 for each key, - * and 4 leaves. 
- * - * In the pruned form, the number of assignments is still 4 but the number - * of leaves is now 3, as below: - * - * Choice(m1) - * 0 Leaf 0.0 - * 1 Choice(m0) - * 1 0 Leaf 1.0 - * 1 1 Leaf 2.0 - * - * @return size_t - */ - size_t nrAssignments() const; - /** * @brief Fold a binary function over the tree, returning accumulator. * diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 653360fb7c..cdbad8c1c0 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -328,59 +328,6 @@ TEST(DecisionTree, Containers) { StringContainerTree converted(stringIntTree, container_of_int); } -/* ************************************************************************** */ -// Test nrAssignments. -TEST(DecisionTree, NrAssignments) { - const std::pair A("A", 2), B("B", 2), C("C", 2); - DT tree({A, B, C}, "1 1 1 1 1 1 1 1"); - - EXPECT_LONGS_EQUAL(8, tree.nrAssignments()); - -#ifdef GTSAM_DT_MERGING - EXPECT(tree.root_->isLeaf()); - auto leaf = std::dynamic_pointer_cast(tree.root_); - EXPECT_LONGS_EQUAL(8, leaf->nrAssignments()); -#endif - - DT tree2({C, B, A}, "1 1 1 2 3 4 5 5"); - /* The tree is - Choice(C) - 0 Choice(B) - 0 0 Leaf 1 - 0 1 Choice(A) - 0 1 0 Leaf 1 - 0 1 1 Leaf 2 - 1 Choice(B) - 1 0 Choice(A) - 1 0 0 Leaf 3 - 1 0 1 Leaf 4 - 1 1 Leaf 5 - */ - - EXPECT_LONGS_EQUAL(8, tree2.nrAssignments()); - - auto root = std::dynamic_pointer_cast(tree2.root_); - CHECK(root); - auto choice0 = std::dynamic_pointer_cast(root->branches()[0]); - CHECK(choice0); - -#ifdef GTSAM_DT_MERGING - EXPECT(choice0->branches()[0]->isLeaf()); - auto choice00 = std::dynamic_pointer_cast(choice0->branches()[0]); - CHECK(choice00); - EXPECT_LONGS_EQUAL(2, choice00->nrAssignments()); - - auto choice1 = std::dynamic_pointer_cast(root->branches()[1]); - CHECK(choice1); - auto choice10 = std::dynamic_pointer_cast(choice1->branches()[0]); - CHECK(choice10); - auto choice11 = 
std::dynamic_pointer_cast(choice1->branches()[1]); - CHECK(choice11); - EXPECT(choice11->isLeaf()); - EXPECT_LONGS_EQUAL(2, choice11->nrAssignments()); -#endif -} - /* ************************************************************************** */ // Test visit. TEST(DecisionTree, visit) { @@ -593,38 +540,6 @@ TEST(DecisionTree, ApplyWithAssignment) { #endif } -/* ************************************************************************** */ -// Test number of assignments. -TEST(DecisionTree, NrAssignments2) { - using gtsam::symbol_shorthand::M; - - std::vector probs = {0, 0, 1, 2}; - - /* Create the decision tree - Choice(m1) - 0 Leaf 0.000000 - 1 Choice(m0) - 1 0 Leaf 1.000000 - 1 1 Leaf 2.000000 - */ - DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; - DecisionTree dt1(keys, probs); - EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); - - /* Create the DecisionTree - Choice(m1) - 0 Choice(m0) - 0 0 Leaf 0.000000 - 0 1 Leaf 1.000000 - 1 Choice(m0) - 1 0 Leaf 0.000000 - 1 1 Leaf 2.000000 - */ - DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; - DecisionTree dt2(keys2, probs); - EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 6e86215957..341eb63e38 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -349,119 +349,6 @@ TEST(DiscreteFactorGraph, markdown) { EXPECT_DOUBLES_EQUAL(0.3, graph[0]->operator()(values), 1e-9); } -TEST(DiscreteFactorGraph, NrAssignments) { -#ifdef GTSAM_DT_MERGING - string expected_dfg = R"( -size: 2 -factor 0: f[ (m0,2), (m1,2), (m2,2), ] - Choice(m2) - 0 Choice(m1) - 0 0 Leaf [2] 0 - 0 1 Choice(m0) - 0 1 0 Leaf [1]0.27527634 - 0 1 1 Leaf [1] 0 - 1 Choice(m1) - 1 0 Leaf [2] 0 - 1 1 Choice(m0) - 1 1 0 Leaf [1]0.44944733 - 1 1 1 Leaf [1]0.27527634 -factor 1: f[ (m0,2), (m1,2), 
(m2,2), (m3,2), ] - Choice(m3) - 0 Choice(m2) - 0 0 Choice(m1) - 0 0 0 Leaf [2] 1 - 0 0 1 Leaf [2]0.015366387 - 0 1 Choice(m1) - 0 1 0 Leaf [2] 1 - 0 1 1 Choice(m0) - 0 1 1 0 Leaf [1] 1 - 0 1 1 1 Leaf [1]0.015365663 - 1 Choice(m2) - 1 0 Choice(m1) - 1 0 0 Leaf [2] 1 - 1 0 1 Choice(m0) - 1 0 1 0 Leaf [1]0.0094115739 - 1 0 1 1 Leaf [1]0.0094115652 - 1 1 Choice(m1) - 1 1 0 Leaf [2] 1 - 1 1 1 Choice(m0) - 1 1 1 0 Leaf [1] 1 - 1 1 1 1 Leaf [1]0.009321081 -)"; -#else - string expected_dfg = R"( -size: 2 -factor 0: f[ (m0,2), (m1,2), (m2,2), ] - Choice(m2) - 0 Choice(m1) - 0 0 Choice(m0) - 0 0 0 Leaf [1] 0 - 0 0 1 Leaf [1] 0 - 0 1 Choice(m0) - 0 1 0 Leaf [1]0.27527634 - 0 1 1 Leaf [1]0.44944733 - 1 Choice(m1) - 1 0 Choice(m0) - 1 0 0 Leaf [1] 0 - 1 0 1 Leaf [1] 0 - 1 1 Choice(m0) - 1 1 0 Leaf [1] 0 - 1 1 1 Leaf [1]0.27527634 -factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] - Choice(m3) - 0 Choice(m2) - 0 0 Choice(m1) - 0 0 0 Choice(m0) - 0 0 0 0 Leaf [1] 1 - 0 0 0 1 Leaf [1] 1 - 0 0 1 Choice(m0) - 0 0 1 0 Leaf [1]0.015366387 - 0 0 1 1 Leaf [1]0.015366387 - 0 1 Choice(m1) - 0 1 0 Choice(m0) - 0 1 0 0 Leaf [1] 1 - 0 1 0 1 Leaf [1] 1 - 0 1 1 Choice(m0) - 0 1 1 0 Leaf [1] 1 - 0 1 1 1 Leaf [1]0.015365663 - 1 Choice(m2) - 1 0 Choice(m1) - 1 0 0 Choice(m0) - 1 0 0 0 Leaf [1] 1 - 1 0 0 1 Leaf [1] 1 - 1 0 1 Choice(m0) - 1 0 1 0 Leaf [1]0.0094115739 - 1 0 1 1 Leaf [1]0.0094115652 - 1 1 Choice(m1) - 1 1 0 Choice(m0) - 1 1 0 0 Leaf [1] 1 - 1 1 0 1 Leaf [1] 1 - 1 1 1 Choice(m0) - 1 1 1 0 Leaf [1] 1 - 1 1 1 1 Leaf [1]0.009321081 -)"; -#endif - - DiscreteKeys d0{{M(0), 2}, {M(1), 2}, {M(2), 2}}; - std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; - AlgebraicDecisionTree dt(d0, p0); - //TODO(Varun) Passing ADT to DiscreteConditional causes nrAssignments to get messed up - // Issue seems to be in DecisionTreeFactor.cpp L104 - DiscreteConditional f0(3, DecisionTreeFactor(d0, dt)); - - DiscreteKeys d1{{M(0), 2}, {M(1), 2}, {M(2), 2}, {M(3), 2}}; - std::vector p1 = { - 1, 
1, 1, 1, 0.015366387, 0.0094115739, 1, 1, - 1, 1, 1, 1, 0.015366387, 0.0094115652, 0.015365663, 0.009321081}; - DecisionTreeFactor f1(d1, p1); - - DiscreteFactorGraph dfg; - dfg.add(f0); - dfg.add(f1); - - EXPECT(assert_print_equal(expected_dfg, dfg)); -} - /* ************************************************************************* */ int main() { TestResult tr; From 2db08281c65d3f47feeb76bc467f9649f3c29a6e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 10 Jul 2023 19:39:28 -0400 Subject: [PATCH 25/36] Revert "remove nrAssignments from the DecisionTree" This reverts commit 647d3c0744171ffa78f585a39a88bfa4be4d2002. --- gtsam/discrete/DecisionTree-inl.h | 47 ++++++-- gtsam/discrete/DecisionTree.h | 36 ++++++ gtsam/discrete/tests/testDecisionTree.cpp | 85 +++++++++++++ .../tests/testDiscreteFactorGraph.cpp | 113 ++++++++++++++++++ 4 files changed, 273 insertions(+), 8 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 10532284cf..156177d039 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -53,17 +53,26 @@ namespace gtsam { /** constant stored in this leaf */ Y constant_; + /** The number of assignments contained within this leaf. + * Particularly useful when leaves have been pruned. + */ + size_t nrAssignments_; + /// Default constructor for serialization. Leaf() {} /// Constructor from constant - Leaf(const Y& constant) : constant_(constant) {} + Leaf(const Y& constant, size_t nrAssignments = 1) + : constant_(constant), nrAssignments_(nrAssignments) {} /// Return the constant const Y& constant() const { return constant_; } + /// Return the number of assignments contained within this leaf. 
+ size_t nrAssignments() const { return nrAssignments_; } + /// Leaf-Leaf equality bool sameLeaf(const Leaf& q) const override { return constant_ == q.constant_; @@ -84,7 +93,8 @@ namespace gtsam { /// print void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { - std::cout << s << " Leaf " << valueFormatter(constant_) << std::endl; + std::cout << s << " Leaf [" << nrAssignments() << "]" + << valueFormatter(constant_) << std::endl; } /** Write graphviz format to stream `os`. */ @@ -104,14 +114,14 @@ namespace gtsam { /** apply unary operator */ NodePtr apply(const Unary& op) const override { - NodePtr f(new Leaf(op(constant_))); + NodePtr f(new Leaf(op(constant_), nrAssignments_)); return f; } /// Apply unary operator with assignment NodePtr apply(const UnaryAssignment& op, const Assignment& assignment) const override { - NodePtr f(new Leaf(op(assignment, constant_))); + NodePtr f(new Leaf(op(assignment, constant_), nrAssignments_)); return f; } @@ -127,7 +137,9 @@ namespace gtsam { // Applying binary operator to two leaves results in a leaf NodePtr apply_g_op_fL(const Leaf& fL, const Binary& op) const override { // fL op gL - NodePtr h(new Leaf(op(fL.constant_, constant_))); + // TODO(Varun) nrAssignments setting is not correct. + // Depending on f and g, the nrAssignments can be different. This is a bug! + NodePtr h(new Leaf(op(fL.constant_, constant_), fL.nrAssignments())); return h; } @@ -138,7 +150,7 @@ namespace gtsam { /** choose a branch, create new memory ! 
*/ NodePtr choose(const L& label, size_t index) const override { - return NodePtr(new Leaf(constant())); + return NodePtr(new Leaf(constant(), nrAssignments())); } bool isLeaf() const override { return true; } @@ -153,6 +165,7 @@ namespace gtsam { void serialize(ARCHIVE& ar, const unsigned int /*version*/) { ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); ar& BOOST_SERIALIZATION_NVP(constant_); + ar& BOOST_SERIALIZATION_NVP(nrAssignments_); } #endif }; // Leaf @@ -222,8 +235,16 @@ namespace gtsam { assert(f->branches().size() > 0); NodePtr f0 = f->branches_[0]; + // Compute total number of assignments + size_t nrAssignments = 0; + for (auto branch : f->branches()) { + if (auto leaf = std::dynamic_pointer_cast(branch)) { + nrAssignments += leaf->nrAssignments(); + } + } NodePtr newLeaf( - new Leaf(std::dynamic_pointer_cast(f0)->constant())); + new Leaf(std::dynamic_pointer_cast(f0)->constant(), + nrAssignments)); return newLeaf; } #endif @@ -709,7 +730,7 @@ namespace gtsam { // If leaf, apply unary conversion "op" and create a unique leaf. 
using MXLeaf = typename DecisionTree::Leaf; if (auto leaf = std::dynamic_pointer_cast(f)) { - return NodePtr(new Leaf(Y_of_X(leaf->constant()))); + return NodePtr(new Leaf(Y_of_X(leaf->constant()), leaf->nrAssignments())); } // Check if Choice @@ -856,6 +877,16 @@ namespace gtsam { return total; } + /****************************************************************************/ + template + size_t DecisionTree::nrAssignments() const { + size_t n = 0; + this->visitLeaf([&n](const DecisionTree::Leaf& leaf) { + n += leaf.nrAssignments(); + }); + return n; + } + /****************************************************************************/ // fold is just done with a visit template diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 733ab1ad1f..9a8eac65e7 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -307,6 +307,42 @@ namespace gtsam { /// Return the number of leaves in the tree. size_t nrLeaves() const; + /** + * @brief This is a convenience function which returns the total number of + * leaf assignments in the decision tree. + * This function is not used for anymajor operations within the discrete + * factor graph framework. + * + * Leaf assignments represent the cardinality of each leaf node, e.g. in a + * binary tree each leaf has 2 assignments. This includes counts removed + * from implicit pruning hence, it will always be >= nrLeaves(). + * + * E.g. we have a decision tree as below, where each node has 2 branches: + * + * Choice(m1) + * 0 Choice(m0) + * 0 0 Leaf 0.0 + * 0 1 Leaf 0.0 + * 1 Choice(m0) + * 1 0 Leaf 1.0 + * 1 1 Leaf 2.0 + * + * In the unpruned form, the tree will have 4 assignments, 2 for each key, + * and 4 leaves. 
+ * + * In the pruned form, the number of assignments is still 4 but the number + * of leaves is now 3, as below: + * + * Choice(m1) + * 0 Leaf 0.0 + * 1 Choice(m0) + * 1 0 Leaf 1.0 + * 1 1 Leaf 2.0 + * + * @return size_t + */ + size_t nrAssignments() const; + /** * @brief Fold a binary function over the tree, returning accumulator. * diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index cdbad8c1c0..653360fb7c 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -328,6 +328,59 @@ TEST(DecisionTree, Containers) { StringContainerTree converted(stringIntTree, container_of_int); } +/* ************************************************************************** */ +// Test nrAssignments. +TEST(DecisionTree, NrAssignments) { + const std::pair A("A", 2), B("B", 2), C("C", 2); + DT tree({A, B, C}, "1 1 1 1 1 1 1 1"); + + EXPECT_LONGS_EQUAL(8, tree.nrAssignments()); + +#ifdef GTSAM_DT_MERGING + EXPECT(tree.root_->isLeaf()); + auto leaf = std::dynamic_pointer_cast(tree.root_); + EXPECT_LONGS_EQUAL(8, leaf->nrAssignments()); +#endif + + DT tree2({C, B, A}, "1 1 1 2 3 4 5 5"); + /* The tree is + Choice(C) + 0 Choice(B) + 0 0 Leaf 1 + 0 1 Choice(A) + 0 1 0 Leaf 1 + 0 1 1 Leaf 2 + 1 Choice(B) + 1 0 Choice(A) + 1 0 0 Leaf 3 + 1 0 1 Leaf 4 + 1 1 Leaf 5 + */ + + EXPECT_LONGS_EQUAL(8, tree2.nrAssignments()); + + auto root = std::dynamic_pointer_cast(tree2.root_); + CHECK(root); + auto choice0 = std::dynamic_pointer_cast(root->branches()[0]); + CHECK(choice0); + +#ifdef GTSAM_DT_MERGING + EXPECT(choice0->branches()[0]->isLeaf()); + auto choice00 = std::dynamic_pointer_cast(choice0->branches()[0]); + CHECK(choice00); + EXPECT_LONGS_EQUAL(2, choice00->nrAssignments()); + + auto choice1 = std::dynamic_pointer_cast(root->branches()[1]); + CHECK(choice1); + auto choice10 = std::dynamic_pointer_cast(choice1->branches()[0]); + CHECK(choice10); + auto choice11 = 
std::dynamic_pointer_cast(choice1->branches()[1]); + CHECK(choice11); + EXPECT(choice11->isLeaf()); + EXPECT_LONGS_EQUAL(2, choice11->nrAssignments()); +#endif +} + /* ************************************************************************** */ // Test visit. TEST(DecisionTree, visit) { @@ -540,6 +593,38 @@ TEST(DecisionTree, ApplyWithAssignment) { #endif } +/* ************************************************************************** */ +// Test number of assignments. +TEST(DecisionTree, NrAssignments2) { + using gtsam::symbol_shorthand::M; + + std::vector probs = {0, 0, 1, 2}; + + /* Create the decision tree + Choice(m1) + 0 Leaf 0.000000 + 1 Choice(m0) + 1 0 Leaf 1.000000 + 1 1 Leaf 2.000000 + */ + DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; + DecisionTree dt1(keys, probs); + EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); + + /* Create the DecisionTree + Choice(m1) + 0 Choice(m0) + 0 0 Leaf 0.000000 + 0 1 Leaf 1.000000 + 1 Choice(m0) + 1 0 Leaf 0.000000 + 1 1 Leaf 2.000000 + */ + DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; + DecisionTree dt2(keys2, probs); + EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 341eb63e38..6e86215957 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -349,6 +349,119 @@ TEST(DiscreteFactorGraph, markdown) { EXPECT_DOUBLES_EQUAL(0.3, graph[0]->operator()(values), 1e-9); } +TEST(DiscreteFactorGraph, NrAssignments) { +#ifdef GTSAM_DT_MERGING + string expected_dfg = R"( +size: 2 +factor 0: f[ (m0,2), (m1,2), (m2,2), ] + Choice(m2) + 0 Choice(m1) + 0 0 Leaf [2] 0 + 0 1 Choice(m0) + 0 1 0 Leaf [1]0.27527634 + 0 1 1 Leaf [1] 0 + 1 Choice(m1) + 1 0 Leaf [2] 0 + 1 1 Choice(m0) + 1 1 0 Leaf [1]0.44944733 + 1 1 1 Leaf [1]0.27527634 +factor 1: f[ (m0,2), (m1,2), 
(m2,2), (m3,2), ] + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf [2] 1 + 0 0 1 Leaf [2]0.015366387 + 0 1 Choice(m1) + 0 1 0 Leaf [2] 1 + 0 1 1 Choice(m0) + 0 1 1 0 Leaf [1] 1 + 0 1 1 1 Leaf [1]0.015365663 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf [2] 1 + 1 0 1 Choice(m0) + 1 0 1 0 Leaf [1]0.0094115739 + 1 0 1 1 Leaf [1]0.0094115652 + 1 1 Choice(m1) + 1 1 0 Leaf [2] 1 + 1 1 1 Choice(m0) + 1 1 1 0 Leaf [1] 1 + 1 1 1 1 Leaf [1]0.009321081 +)"; +#else + string expected_dfg = R"( +size: 2 +factor 0: f[ (m0,2), (m1,2), (m2,2), ] + Choice(m2) + 0 Choice(m1) + 0 0 Choice(m0) + 0 0 0 Leaf [1] 0 + 0 0 1 Leaf [1] 0 + 0 1 Choice(m0) + 0 1 0 Leaf [1]0.27527634 + 0 1 1 Leaf [1]0.44944733 + 1 Choice(m1) + 1 0 Choice(m0) + 1 0 0 Leaf [1] 0 + 1 0 1 Leaf [1] 0 + 1 1 Choice(m0) + 1 1 0 Leaf [1] 0 + 1 1 1 Leaf [1]0.27527634 +factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Choice(m0) + 0 0 0 0 Leaf [1] 1 + 0 0 0 1 Leaf [1] 1 + 0 0 1 Choice(m0) + 0 0 1 0 Leaf [1]0.015366387 + 0 0 1 1 Leaf [1]0.015366387 + 0 1 Choice(m1) + 0 1 0 Choice(m0) + 0 1 0 0 Leaf [1] 1 + 0 1 0 1 Leaf [1] 1 + 0 1 1 Choice(m0) + 0 1 1 0 Leaf [1] 1 + 0 1 1 1 Leaf [1]0.015365663 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Choice(m0) + 1 0 0 0 Leaf [1] 1 + 1 0 0 1 Leaf [1] 1 + 1 0 1 Choice(m0) + 1 0 1 0 Leaf [1]0.0094115739 + 1 0 1 1 Leaf [1]0.0094115652 + 1 1 Choice(m1) + 1 1 0 Choice(m0) + 1 1 0 0 Leaf [1] 1 + 1 1 0 1 Leaf [1] 1 + 1 1 1 Choice(m0) + 1 1 1 0 Leaf [1] 1 + 1 1 1 1 Leaf [1]0.009321081 +)"; +#endif + + DiscreteKeys d0{{M(0), 2}, {M(1), 2}, {M(2), 2}}; + std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; + AlgebraicDecisionTree dt(d0, p0); + //TODO(Varun) Passing ADT to DiscreteConditional causes nrAssignments to get messed up + // Issue seems to be in DecisionTreeFactor.cpp L104 + DiscreteConditional f0(3, DecisionTreeFactor(d0, dt)); + + DiscreteKeys d1{{M(0), 2}, {M(1), 2}, {M(2), 2}, {M(3), 2}}; + std::vector p1 = { + 1, 
1, 1, 1, 0.015366387, 0.0094115739, 1, 1, + 1, 1, 1, 1, 0.015366387, 0.0094115652, 0.015365663, 0.009321081}; + DecisionTreeFactor f1(d1, p1); + + DiscreteFactorGraph dfg; + dfg.add(f0); + dfg.add(f1); + + EXPECT(assert_print_equal(expected_dfg, dfg)); +} + /* ************************************************************************* */ int main() { TestResult tr; From b7deefd744b8a5cb544d863eb1e6f35da56f7bff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 10 Jul 2023 19:39:36 -0400 Subject: [PATCH 26/36] Revert "enumerate all assignments for computing probabilities to prune" This reverts commit 8c38e45c83c6bbeb2ef5e7c7d73807b2eed6831f. --- gtsam/discrete/DecisionTreeFactor.cpp | 11 ++-- .../tests/testGaussianMixtureFactor.cpp | 4 +- .../tests/testHybridNonlinearFactorGraph.cpp | 58 +++++++++---------- gtsam/hybrid/tests/testMixtureFactor.cpp | 4 +- 4 files changed, 38 insertions(+), 39 deletions(-) diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index 6f20907e14..4aca10a284 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -306,12 +306,11 @@ namespace gtsam { // Get the probabilities in the decision tree so we can threshold. 
std::vector probabilities; - // NOTE(Varun) this is potentially slow due to the cartesian product - auto allValues = DiscreteValues::CartesianProduct(this->discreteKeys()); - for (auto&& val : allValues) { - double prob = (*this)(val); - probabilities.push_back(prob); - } + this->visitLeaf([&](const Leaf& leaf) { + const size_t nrAssignments = leaf.nrAssignments(); + double prob = leaf.constant(); + probabilities.insert(probabilities.end(), nrAssignments, prob); + }); // The number of probabilities can be lower than max_leaves if (probabilities.size() <= N) { diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 75ba5a0594..5207e9372a 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -108,7 +108,7 @@ TEST(GaussianMixtureFactor, Printing) { std::string expected = R"(Hybrid [x1 x2; 1]{ Choice(1) - 0 Leaf : + 0 Leaf [1]: A[x1] = [ 0; 0 @@ -120,7 +120,7 @@ TEST(GaussianMixtureFactor, Printing) { b = [ 0 0 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x1] = [ 0; 0 diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 29136c62ad..0a621c42d2 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -493,7 +493,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf : + 0 Leaf [1]: A[x0] = [ -1 ] @@ -503,7 +503,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x0] = [ -1 ] @@ -517,7 +517,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 x2; m1]{ Choice(m1) - 0 Leaf : + 0 Leaf [1]: A[x1] = [ -1 ] @@ -527,7 +527,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x1] = [ -1 ] @@ -551,16 +551,16 @@ factor 4: b = [ -10 ] No noise model factor 5: P( m0 ): - Leaf 0.5 + Leaf [2] 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf 
0.33333333 - 0 1 Leaf 0.6 + 0 0 Leaf [1]0.33333333 + 0 1 Leaf [1] 0.6 1 Choice(m0) - 1 0 Leaf 0.66666667 - 1 1 Leaf 0.4 + 1 0 Leaf [1]0.66666667 + 1 1 Leaf [1] 0.4 )"; #else @@ -575,7 +575,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf : + 0 Leaf [1]: A[x0] = [ -1 ] @@ -585,7 +585,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x0] = [ -1 ] @@ -599,7 +599,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 x2; m1]{ Choice(m1) - 0 Leaf : + 0 Leaf [1]: A[x1] = [ -1 ] @@ -609,7 +609,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf : + 1 Leaf [1]: A[x1] = [ -1 ] @@ -634,17 +634,17 @@ factor 4: No noise model factor 5: P( m0 ): Choice(m0) - 0 Leaf 0.5 - 1 Leaf 0.5 + 0 Leaf [1] 0.5 + 1 Leaf [1] 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf 0.33333333 - 0 1 Leaf 0.6 + 0 0 Leaf [1]0.33333333 + 0 1 Leaf [1] 0.6 1 Choice(m0) - 1 0 Leaf 0.66666667 - 1 1 Leaf 0.4 + 1 0 Leaf [1]0.66666667 + 1 1 Leaf [1] 0.4 )"; #endif @@ -657,13 +657,13 @@ size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), Choice(m0) - 0 Leaf p(x0 | x1) + 0 Leaf [1] p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] No noise model - 1 Leaf p(x0 | x1) + 1 Leaf [1] p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] @@ -673,26 +673,26 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf p(x1 | x2) + 0 0 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] No noise model - 0 1 Leaf p(x1 | x2) + 0 1 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] No noise model 1 Choice(m0) - 1 0 Leaf p(x1 | x2) + 1 0 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] No noise model - 1 1 Leaf p(x1 | x2) + 1 1 Leaf [1] p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] @@ -702,14 +702,14 @@ conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 
Choice(m0) - 0 0 Leaf p(x2) + 0 0 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.1489 ] mean: 1 elements x2: -1.0099 No noise model - 0 1 Leaf p(x2) + 0 1 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.1479 ] mean: 1 elements @@ -717,14 +717,14 @@ conditional 2: Hybrid P( x2 | m0 m1) No noise model 1 Choice(m0) - 1 0 Leaf p(x2) + 1 0 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.0504 ] mean: 1 elements x2: -1.0001 No noise model - 1 1 Leaf p(x2) + 1 1 Leaf [1] p(x2) R = [ 10.0494 ] d = [ -10.0494 ] mean: 1 elements diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 0827e4eedb..03fdccff26 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -63,8 +63,8 @@ TEST(MixtureFactor, Printing) { R"(Hybrid [x1 x2; 1] MixtureFactor Choice(1) - 0 Leaf Nonlinear factor on 2 keys - 1 Leaf Nonlinear factor on 2 keys + 0 Leaf [1] Nonlinear factor on 2 keys + 1 Leaf [1] Nonlinear factor on 2 keys )"; EXPECT(assert_print_equal(expected, mixtureFactor)); } From cf6c1cad1fad46d43857b941cc9227db0fd34f78 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 18 Jul 2023 22:38:24 -0400 Subject: [PATCH 27/36] fix tests --- .../tests/testDiscreteFactorGraph.cpp | 82 +++++++++---------- gtsam/hybrid/MixtureFactor.h | 2 +- 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 6e86215957..08b5f2db97 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -356,37 +356,37 @@ size: 2 factor 0: f[ (m0,2), (m1,2), (m2,2), ] Choice(m2) 0 Choice(m1) - 0 0 Leaf [2] 0 + 0 0 Leaf [2] 0 0 1 Choice(m0) - 0 1 0 Leaf [1]0.27527634 - 0 1 1 Leaf [1] 0 + 0 1 0 Leaf [1] 0.27527634 + 0 1 1 Leaf [1] 0 1 Choice(m1) - 1 0 Leaf [2] 0 + 1 0 Leaf [2] 0 1 1 Choice(m0) - 1 1 0 Leaf [1]0.44944733 - 1 1 1 Leaf [1]0.27527634 + 1 1 0 Leaf [1] 0.44944733 + 1 1 1 
Leaf [1] 0.27527634 factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] Choice(m3) 0 Choice(m2) 0 0 Choice(m1) - 0 0 0 Leaf [2] 1 - 0 0 1 Leaf [2]0.015366387 + 0 0 0 Leaf [2] 1 + 0 0 1 Leaf [2] 0.015366387 0 1 Choice(m1) - 0 1 0 Leaf [2] 1 + 0 1 0 Leaf [2] 1 0 1 1 Choice(m0) - 0 1 1 0 Leaf [1] 1 - 0 1 1 1 Leaf [1]0.015365663 + 0 1 1 0 Leaf [1] 1 + 0 1 1 1 Leaf [1] 0.015365663 1 Choice(m2) 1 0 Choice(m1) - 1 0 0 Leaf [2] 1 + 1 0 0 Leaf [2] 1 1 0 1 Choice(m0) - 1 0 1 0 Leaf [1]0.0094115739 - 1 0 1 1 Leaf [1]0.0094115652 + 1 0 1 0 Leaf [1] 0.0094115739 + 1 0 1 1 Leaf [1] 0.0094115652 1 1 Choice(m1) - 1 1 0 Leaf [2] 1 + 1 1 0 Leaf [2] 1 1 1 1 Choice(m0) - 1 1 1 0 Leaf [1] 1 - 1 1 1 1 Leaf [1]0.009321081 + 1 1 1 0 Leaf [1] 1 + 1 1 1 1 Leaf [1] 0.009321081 )"; #else string expected_dfg = R"( @@ -395,50 +395,50 @@ factor 0: f[ (m0,2), (m1,2), (m2,2), ] Choice(m2) 0 Choice(m1) 0 0 Choice(m0) - 0 0 0 Leaf [1] 0 - 0 0 1 Leaf [1] 0 + 0 0 0 Leaf [1] 0 + 0 0 1 Leaf [1] 0 0 1 Choice(m0) - 0 1 0 Leaf [1]0.27527634 - 0 1 1 Leaf [1]0.44944733 + 0 1 0 Leaf [1] 0.27527634 + 0 1 1 Leaf [1] 0.44944733 1 Choice(m1) 1 0 Choice(m0) - 1 0 0 Leaf [1] 0 - 1 0 1 Leaf [1] 0 + 1 0 0 Leaf [1] 0 + 1 0 1 Leaf [1] 0 1 1 Choice(m0) - 1 1 0 Leaf [1] 0 - 1 1 1 Leaf [1]0.27527634 + 1 1 0 Leaf [1] 0 + 1 1 1 Leaf [1] 0.27527634 factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] Choice(m3) 0 Choice(m2) 0 0 Choice(m1) 0 0 0 Choice(m0) - 0 0 0 0 Leaf [1] 1 - 0 0 0 1 Leaf [1] 1 + 0 0 0 0 Leaf [1] 1 + 0 0 0 1 Leaf [1] 1 0 0 1 Choice(m0) - 0 0 1 0 Leaf [1]0.015366387 - 0 0 1 1 Leaf [1]0.015366387 + 0 0 1 0 Leaf [1] 0.015366387 + 0 0 1 1 Leaf [1] 0.015366387 0 1 Choice(m1) 0 1 0 Choice(m0) - 0 1 0 0 Leaf [1] 1 - 0 1 0 1 Leaf [1] 1 + 0 1 0 0 Leaf [1] 1 + 0 1 0 1 Leaf [1] 1 0 1 1 Choice(m0) - 0 1 1 0 Leaf [1] 1 - 0 1 1 1 Leaf [1]0.015365663 + 0 1 1 0 Leaf [1] 1 + 0 1 1 1 Leaf [1] 0.015365663 1 Choice(m2) 1 0 Choice(m1) 1 0 0 Choice(m0) - 1 0 0 0 Leaf [1] 1 - 1 0 0 1 Leaf [1] 1 + 1 0 0 0 Leaf [1] 1 + 1 0 0 1 Leaf [1] 1 1 0 
1 Choice(m0) - 1 0 1 0 Leaf [1]0.0094115739 - 1 0 1 1 Leaf [1]0.0094115652 + 1 0 1 0 Leaf [1] 0.0094115739 + 1 0 1 1 Leaf [1] 0.0094115652 1 1 Choice(m1) 1 1 0 Choice(m0) - 1 1 0 0 Leaf [1] 1 - 1 1 0 1 Leaf [1] 1 + 1 1 0 0 Leaf [1] 1 + 1 1 0 1 Leaf [1] 1 1 1 1 Choice(m0) - 1 1 1 0 Leaf [1] 1 - 1 1 1 1 Leaf [1]0.009321081 + 1 1 1 0 Leaf [1] 1 + 1 1 1 1 Leaf [1] 0.009321081 )"; #endif diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index 529c8687b1..df8e0193ad 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -191,7 +191,7 @@ class MixtureFactor : public HybridFactor { std::cout << "\nMixtureFactor\n"; auto valueFormatter = [](const sharedFactor& v) { if (v) { - return " Nonlinear factor on " + std::to_string(v->size()) + " keys"; + return "Nonlinear factor on " + std::to_string(v->size()) + " keys"; } else { return std::string("nullptr"); } From 1dfb388587b86c6808e354b8b3086c3725bdfcb4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 20 Jul 2023 15:47:29 -0400 Subject: [PATCH 28/36] fix odd behavior in nrAssignments --- gtsam/discrete/DecisionTree-inl.h | 29 ++++++++++++------- gtsam/discrete/DecisionTree.h | 8 +++-- gtsam/discrete/tests/testDecisionTree.cpp | 2 +- .../tests/testDiscreteFactorGraph.cpp | 11 ++++--- 4 files changed, 30 insertions(+), 20 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index c3f6b3c0e4..541bd77f42 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -137,8 +137,8 @@ namespace gtsam { // Applying binary operator to two leaves results in a leaf NodePtr apply_g_op_fL(const Leaf& fL, const Binary& op) const override { // fL op gL - // TODO(Varun) nrAssignments setting is not correct. - // Depending on f and g, the nrAssignments can be different. This is a bug! + // The nrAssignments is always set to fL since we consider g operating on + // (or modifying) f. 
NodePtr h(new Leaf(op(fL.constant_, constant_), fL.nrAssignments())); return h; } @@ -149,8 +149,9 @@ namespace gtsam { } /** choose a branch, create new memory ! */ - NodePtr choose(const L& label, size_t index) const override { - return NodePtr(new Leaf(constant(), nrAssignments())); + NodePtr choose(const L& label, size_t index, + bool make_unique = true) const override { + return NodePtr(new Leaf(constant(), 1)); } bool isLeaf() const override { return true; } @@ -468,14 +469,22 @@ namespace gtsam { } /** choose a branch, recursively */ - NodePtr choose(const L& label, size_t index) const override { + NodePtr choose(const L& label, size_t index, + bool make_unique = true) const override { if (label_ == label) return branches_[index]; // choose branch // second case, not label of interest, just recurse auto r = std::make_shared(label_, branches_.size()); - for (auto&& branch : branches_) - r->push_back(branch->choose(label, index)); - return Unique(r); + for (auto&& branch : branches_) { + r->push_back(branch->choose(label, index, make_unique)); + } + + if (make_unique) { + return Unique(r); + } else { + return r; + } + // return Unique(r); } private: @@ -997,9 +1006,9 @@ namespace gtsam { template DecisionTree DecisionTree::combine(const L& label, size_t cardinality, const Binary& op) const { - DecisionTree result = choose(label, 0); + DecisionTree result = choose(label, 0, false); for (size_t index = 1; index < cardinality; index++) { - DecisionTree chosen = choose(label, index); + DecisionTree chosen = choose(label, index, false); result = result.apply(chosen, op); } return result; diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 48a1a45961..7761de84e0 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -129,7 +129,8 @@ namespace gtsam { virtual Ptr apply_f_op_g(const Node&, const Binary&) const = 0; virtual Ptr apply_g_op_fL(const Leaf&, const Binary&) const = 0; virtual Ptr apply_g_op_fC(const 
Choice&, const Binary&) const = 0; - virtual Ptr choose(const L& label, size_t index) const = 0; + virtual Ptr choose(const L& label, size_t index, + bool make_unique = true) const = 0; virtual bool isLeaf() const = 0; private: @@ -403,8 +404,9 @@ namespace gtsam { /** create a new function where value(label)==index * It's like "restrict" in Darwiche09book pg329, 330? */ - DecisionTree choose(const L& label, size_t index) const { - NodePtr newRoot = root_->choose(label, index); + DecisionTree choose(const L& label, size_t index, + bool make_unique = true) const { + NodePtr newRoot = root_->choose(label, index, make_unique); return DecisionTree(newRoot); } diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index e94c72c341..ff50c89526 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -236,7 +236,7 @@ TEST(DecisionTree, Example) { #ifdef GTSAM_DT_MERGING EXPECT(assert_equal(DT(0.0), actual0)); #else - // EXPECT(assert_equal(DT({0.0, 0.0}), actual0)); + EXPECT(assert_equal(DT({0.0, 0.0}), actual0)); #endif DOT(actual0); diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 08b5f2db97..871583a2cc 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -349,6 +349,7 @@ TEST(DiscreteFactorGraph, markdown) { EXPECT_DOUBLES_EQUAL(0.3, graph[0]->operator()(values), 1e-9); } +/* ************************************************************************* */ TEST(DiscreteFactorGraph, NrAssignments) { #ifdef GTSAM_DT_MERGING string expected_dfg = R"( @@ -358,13 +359,13 @@ factor 0: f[ (m0,2), (m1,2), (m2,2), ] 0 Choice(m1) 0 0 Leaf [2] 0 0 1 Choice(m0) - 0 1 0 Leaf [1] 0.27527634 + 0 1 0 Leaf [1] 0.17054468 0 1 1 Leaf [1] 0 1 Choice(m1) 1 0 Leaf [2] 0 1 1 Choice(m0) - 1 1 0 Leaf [1] 0.44944733 - 1 1 1 Leaf [1] 0.27527634 + 1 1 0 Leaf [1] 0.27845056 + 
1 1 1 Leaf [1] 0.17054468 factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] Choice(m3) 0 Choice(m2) @@ -445,9 +446,7 @@ factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] DiscreteKeys d0{{M(0), 2}, {M(1), 2}, {M(2), 2}}; std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; AlgebraicDecisionTree dt(d0, p0); - //TODO(Varun) Passing ADT to DiscreteConditional causes nrAssignments to get messed up - // Issue seems to be in DecisionTreeFactor.cpp L104 - DiscreteConditional f0(3, DecisionTreeFactor(d0, dt)); + DiscreteConditional f0(3, d0, dt); DiscreteKeys d1{{M(0), 2}, {M(1), 2}, {M(2), 2}, {M(3), 2}}; std::vector p1 = { From ea24a2c7e8e8ca10cdd02ddea663748905cf8db4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 20 Jul 2023 15:47:58 -0400 Subject: [PATCH 29/36] park changes so I can come back to them later --- gtsam/discrete/DiscreteFactorGraph.cpp | 10 ++ .../tests/testDiscreteConditional.cpp | 106 ++++++++++++++---- gtsam/hybrid/HybridBayesNet.cpp | 13 +++ gtsam/hybrid/HybridGaussianFactorGraph.cpp | 2 +- 4 files changed, 111 insertions(+), 20 deletions(-) diff --git a/gtsam/discrete/DiscreteFactorGraph.cpp b/gtsam/discrete/DiscreteFactorGraph.cpp index 4ededbb8be..e0d2ed4de7 100644 --- a/gtsam/discrete/DiscreteFactorGraph.cpp +++ b/gtsam/discrete/DiscreteFactorGraph.cpp @@ -204,12 +204,18 @@ namespace gtsam { std::pair // EliminateDiscrete(const DiscreteFactorGraph& factors, const Ordering& frontalKeys) { + factors.print("The Factors to eliminate:"); // PRODUCT: multiply all factors gttic(product); DecisionTreeFactor product; for (auto&& factor : factors) product = (*factor) * product; gttoc(product); + std::cout << "\n\n==========" << std::endl; + std::cout << "Product" << std::endl; + std::cout << std::endl; + product.print(); + // Max over all the potentials by pretending all keys are frontal: auto normalization = product.max(product.size()); @@ -221,6 +227,10 @@ namespace gtsam { DecisionTreeFactor::shared_ptr sum = product.sum(frontalKeys); 
gttoc(sum); + std::cout << "\n->Sum" << std::endl; + sum->print(); + std::cout << "----------------------" << std::endl; + // Ordering keys for the conditional so that frontalKeys are really in front Ordering orderedKeys; orderedKeys.insert(orderedKeys.end(), frontalKeys.begin(), diff --git a/gtsam/discrete/tests/testDiscreteConditional.cpp b/gtsam/discrete/tests/testDiscreteConditional.cpp index aa393d74cc..397e7ff0c3 100644 --- a/gtsam/discrete/tests/testDiscreteConditional.cpp +++ b/gtsam/discrete/tests/testDiscreteConditional.cpp @@ -22,13 +22,15 @@ #include #include #include +#include +#include using namespace std; using namespace gtsam; /* ************************************************************************* */ -TEST(DiscreteConditional, constructors) { +TEST_DISABLED(DiscreteConditional, constructors) { DiscreteKey X(0, 2), Y(2, 3), Z(1, 2); // watch ordering ! DiscreteConditional actual(X | Y = "1/1 2/3 1/4"); @@ -49,7 +51,7 @@ TEST(DiscreteConditional, constructors) { } /* ************************************************************************* */ -TEST(DiscreteConditional, constructors_alt_interface) { +TEST_DISABLED(DiscreteConditional, constructors_alt_interface) { DiscreteKey X(0, 2), Y(2, 3), Z(1, 2); // watch ordering ! 
const Signature::Row r1{1, 1}, r2{2, 3}, r3{1, 4}; @@ -68,7 +70,7 @@ TEST(DiscreteConditional, constructors_alt_interface) { } /* ************************************************************************* */ -TEST(DiscreteConditional, constructors2) { +TEST_DISABLED(DiscreteConditional, constructors2) { DiscreteKey C(0, 2), B(1, 2); Signature signature((C | B) = "4/1 3/1"); DiscreteConditional actual(signature); @@ -78,7 +80,7 @@ TEST(DiscreteConditional, constructors2) { } /* ************************************************************************* */ -TEST(DiscreteConditional, constructors3) { +TEST_DISABLED(DiscreteConditional, constructors3) { DiscreteKey C(0, 2), B(1, 2), A(2, 2); Signature signature((C | B, A) = "4/1 1/1 1/1 1/4"); DiscreteConditional actual(signature); @@ -89,7 +91,7 @@ TEST(DiscreteConditional, constructors3) { /* ****************************************************************************/ // Test evaluate for a discrete Prior P(Asia). -TEST(DiscreteConditional, PriorProbability) { +TEST_DISABLED(DiscreteConditional, PriorProbability) { constexpr Key asiaKey = 0; const DiscreteKey Asia(asiaKey, 2); DiscreteConditional dc(Asia, "4/6"); @@ -100,7 +102,7 @@ TEST(DiscreteConditional, PriorProbability) { /* ************************************************************************* */ // Check that error, logProbability, evaluate all work as expected. 
-TEST(DiscreteConditional, probability) { +TEST_DISABLED(DiscreteConditional, probability) { DiscreteKey C(2, 2), D(4, 2), E(3, 2); DiscreteConditional C_given_DE((C | D, E) = "4/1 1/1 1/1 1/4"); @@ -114,7 +116,7 @@ TEST(DiscreteConditional, probability) { /* ************************************************************************* */ // Check calculation of joint P(A,B) -TEST(DiscreteConditional, Multiply) { +TEST_DISABLED(DiscreteConditional, Multiply) { DiscreteKey A(1, 2), B(0, 2); DiscreteConditional conditional(A | B = "1/2 2/1"); DiscreteConditional prior(B % "1/2"); @@ -139,7 +141,7 @@ TEST(DiscreteConditional, Multiply) { /* ************************************************************************* */ // Check calculation of conditional joint P(A,B|C) -TEST(DiscreteConditional, Multiply2) { +TEST_DISABLED(DiscreteConditional, Multiply2) { DiscreteKey A(0, 2), B(1, 2), C(2, 2); DiscreteConditional A_given_B(A | B = "1/3 3/1"); DiscreteConditional B_given_C(B | C = "1/3 3/1"); @@ -159,7 +161,7 @@ TEST(DiscreteConditional, Multiply2) { /* ************************************************************************* */ // Check calculation of conditional joint P(A,B|C), double check keys -TEST(DiscreteConditional, Multiply3) { +TEST_DISABLED(DiscreteConditional, Multiply3) { DiscreteKey A(1, 2), B(2, 2), C(0, 2); // different keys!!! 
DiscreteConditional A_given_B(A | B = "1/3 3/1"); DiscreteConditional B_given_C(B | C = "1/3 3/1"); @@ -179,7 +181,7 @@ TEST(DiscreteConditional, Multiply3) { /* ************************************************************************* */ // Check calculation of conditional joint P(A,B,C|D,E) = P(A,B|D) P(C|D,E) -TEST(DiscreteConditional, Multiply4) { +TEST_DISABLED(DiscreteConditional, Multiply4) { DiscreteKey A(0, 2), B(1, 2), C(2, 2), D(4, 2), E(3, 2); DiscreteConditional A_given_B(A | B = "1/3 3/1"); DiscreteConditional B_given_D(B | D = "1/3 3/1"); @@ -203,7 +205,7 @@ TEST(DiscreteConditional, Multiply4) { /* ************************************************************************* */ // Check calculation of marginals for joint P(A,B) -TEST(DiscreteConditional, marginals) { +TEST_DISABLED(DiscreteConditional, marginals) { DiscreteKey A(1, 2), B(0, 2); DiscreteConditional conditional(A | B = "1/2 2/1"); DiscreteConditional prior(B % "1/2"); @@ -225,7 +227,7 @@ TEST(DiscreteConditional, marginals) { /* ************************************************************************* */ // Check calculation of marginals in case branches are pruned -TEST(DiscreteConditional, marginals2) { +TEST_DISABLED(DiscreteConditional, marginals2) { DiscreteKey A(0, 2), B(1, 2); // changing keys need to make pruning happen! 
DiscreteConditional conditional(A | B = "2/2 3/1"); DiscreteConditional prior(B % "1/2"); @@ -241,7 +243,7 @@ TEST(DiscreteConditional, marginals2) { } /* ************************************************************************* */ -TEST(DiscreteConditional, likelihood) { +TEST_DISABLED(DiscreteConditional, likelihood) { DiscreteKey X(0, 2), Y(1, 3); DiscreteConditional conditional(X | Y = "2/8 4/6 5/5"); @@ -256,7 +258,7 @@ TEST(DiscreteConditional, likelihood) { /* ************************************************************************* */ // Check choose on P(C|D,E) -TEST(DiscreteConditional, choose) { +TEST_DISABLED(DiscreteConditional, choose) { DiscreteKey C(2, 2), D(4, 2), E(3, 2); DiscreteConditional C_given_DE((C | D, E) = "4/1 1/1 1/1 1/4"); @@ -284,7 +286,7 @@ TEST(DiscreteConditional, choose) { /* ************************************************************************* */ // Check markdown representation looks as expected, no parents. -TEST(DiscreteConditional, markdown_prior) { +TEST_DISABLED(DiscreteConditional, markdown_prior) { DiscreteKey A(Symbol('x', 1), 3); DiscreteConditional conditional(A % "1/2/2"); string expected = @@ -300,7 +302,7 @@ TEST(DiscreteConditional, markdown_prior) { /* ************************************************************************* */ // Check markdown representation looks as expected, no parents + names. -TEST(DiscreteConditional, markdown_prior_names) { +TEST_DISABLED(DiscreteConditional, markdown_prior_names) { Symbol x1('x', 1); DiscreteKey A(x1, 3); DiscreteConditional conditional(A % "1/2/2"); @@ -318,7 +320,7 @@ TEST(DiscreteConditional, markdown_prior_names) { /* ************************************************************************* */ // Check markdown representation looks as expected, multivalued. 
-TEST(DiscreteConditional, markdown_multivalued) { +TEST_DISABLED(DiscreteConditional, markdown_multivalued) { DiscreteKey A(Symbol('a', 1), 3), B(Symbol('b', 1), 5); DiscreteConditional conditional( A | B = "2/88/10 2/20/78 33/33/34 33/33/34 95/2/3"); @@ -337,7 +339,7 @@ TEST(DiscreteConditional, markdown_multivalued) { /* ************************************************************************* */ // Check markdown representation looks as expected, two parents + names. -TEST(DiscreteConditional, markdown) { +TEST_DISABLED(DiscreteConditional, markdown) { DiscreteKey A(2, 2), B(1, 2), C(0, 3); DiscreteConditional conditional(A, {B, C}, "0/1 1/3 1/1 3/1 0/1 1/0"); string expected = @@ -360,7 +362,7 @@ TEST(DiscreteConditional, markdown) { /* ************************************************************************* */ // Check html representation looks as expected, two parents + names. -TEST(DiscreteConditional, html) { +TEST_DISABLED(DiscreteConditional, html) { DiscreteKey A(2, 2), B(1, 2), C(0, 3); DiscreteConditional conditional(A, {B, C}, "0/1 1/3 1/1 3/1 0/1 1/0"); string expected = @@ -388,6 +390,72 @@ TEST(DiscreteConditional, html) { EXPECT(actual == expected); } +/* ************************************************************************* */ +TEST(DiscreteConditional, NrAssignments) { +#ifdef GTSAM_DT_MERGING + string expected = R"( P( 0 1 2 ): + Choice(2) + 0 Choice(1) + 0 0 Leaf [2] 0 + 0 1 Choice(0) + 0 1 0 Leaf [1] 0.27527634 + 0 1 1 Leaf [1] 0 + 1 Choice(1) + 1 0 Leaf [2] 0 + 1 1 Choice(0) + 1 1 0 Leaf [1] 0.44944733 + 1 1 1 Leaf [1] 0.27527634 + +)"; +#else + string expected = R"( P( 0 1 2 ): + Choice(2) + 0 Choice(1) + 0 0 Choice(0) + 0 0 0 Leaf [1] 0 + 0 0 1 Leaf [1] 0 + 0 1 Choice(0) + 0 1 0 Leaf [1] 0.27527634 + 0 1 1 Leaf [1] 0.44944733 + 1 Choice(1) + 1 0 Choice(0) + 1 0 0 Leaf [1] 0 + 1 0 1 Leaf [1] 0 + 1 1 Choice(0) + 1 1 0 Leaf [1] 0 + 1 1 1 Leaf [1] 0.27527634 + +)"; +#endif + + DiscreteKeys d0{{0, 2}, {1, 2}, {2, 2}}; + std::vector p0 = 
{0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; + AlgebraicDecisionTree dt(d0, p0); + DecisionTreeFactor dtf(d0, dt); + DiscreteConditional f0(3, dtf); + + EXPECT(assert_print_equal(expected, f0)); + + DiscreteFactorGraph dfg{f0}; + dfg.print(); + auto dbn = dfg.eliminateSequential(); + dbn->print(); + + // DiscreteKeys d0{{0, 2}, {1, 2}}; + // std::vector p0 = {0, 1, 0, 2}; + // AlgebraicDecisionTree dt0(d0, p0); + // dt0.print("", DefaultKeyFormatter); + + // DiscreteKeys d1{{0, 2}}; + // std::vector p1 = {1, 1, 1, 1}; + // AlgebraicDecisionTree dt1(d0, p1); + // dt1.print("", DefaultKeyFormatter); + + // auto dd = dt0 / dt1; + // dd.print("", DefaultKeyFormatter); +} + + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index b4bf612208..a6163fd3ce 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -140,15 +140,26 @@ DecisionTreeFactor HybridBayesNet::pruneDiscreteConditionals( for (size_t i = 0; i < this->size(); i++) { auto conditional = this->at(i); if (conditional->isDiscrete()) { + std::cout << ">>>" << std::endl; + conditional->print(); discreteProbs = discreteProbs * (*conditional->asDiscrete()); + // discreteProbs.print(); + // std::cout << "================\n" << std::endl; Ordering conditional_keys(conditional->frontals()); discrete_frontals += conditional_keys; discrete_factor_idxs.push_back(i); } } + std::cout << "Original Joint Prob:" << std::endl; + std::cout << discreteProbs.nrAssignments() << std::endl; + discreteProbs.print(); const DecisionTreeFactor prunedDiscreteProbs = discreteProbs.prune(maxNrLeaves); + std::cout << "Pruned Joint Prob:" << std::endl; + std::cout << prunedDiscreteProbs.nrAssignments() << std::endl; + prunedDiscreteProbs.print(); + std::cout << "\n\n\n"; gttoc_(HybridBayesNet_PruneDiscreteConditionals); // Eliminate joint probability back into 
conditionals @@ -159,6 +170,8 @@ DecisionTreeFactor HybridBayesNet::pruneDiscreteConditionals( // Assign pruned discrete conditionals back at the correct indices. for (size_t i = 0; i < discrete_factor_idxs.size(); i++) { size_t idx = discrete_factor_idxs.at(i); + // std::cout << i << std::endl; + // dbn->at(i)->print(); this->at(idx) = std::make_shared(dbn->at(i)); } gttoc_(HybridBayesNet_UpdateDiscreteConditionals); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 2d4ac83f61..444cef4397 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -178,7 +178,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, throwRuntimeError("continuousElimination", f); } } - + dfg.print("The DFG to eliminate"); // NOTE: This does sum-product. For max-product, use EliminateForMPE. auto result = EliminateDiscrete(dfg, frontalKeys); From b35fb0f668b30a087f5516600758e3f874800183 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 28 Jul 2023 15:48:13 -0400 Subject: [PATCH 30/36] update tests --- gtsam/discrete/tests/testDecisionTree.cpp | 40 +++++++------------ .../tests/testDiscreteConditional.cpp | 38 +++++++++--------- 2 files changed, 34 insertions(+), 44 deletions(-) diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index ff50c89526..54e5b3de97 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -473,47 +473,37 @@ TEST(DecisionTree, VisitWithPruned) { #ifdef GTSAM_DT_MERGING expectedAssignment = {{"B", 0}, {"C", 0}}; EXPECT(expectedAssignment == choices.at(0)); -#else - expectedAssignment = {{"A", 0}, {"B", 0}, {"C", 0}}; - EXPECT(expectedAssignment == choices.at(0)); -#endif -#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 0}}; EXPECT(expectedAssignment == choices.at(1)); -#else - expectedAssignment = {{"A", 1}, {"B", 0}, {"C", 
0}}; - EXPECT(expectedAssignment == choices.at(1)); -#endif -#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 0}}; EXPECT(expectedAssignment == choices.at(2)); -#else - expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 0}}; - EXPECT(expectedAssignment == choices.at(2)); -#endif -#ifdef GTSAM_DT_MERGING expectedAssignment = {{"B", 0}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(3)); -#else - expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 0}}; - EXPECT(expectedAssignment == choices.at(3)); -#endif -#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(4)); -#else - expectedAssignment = {{"A", 0}, {"B", 0}, {"C", 1}}; - EXPECT(expectedAssignment == choices.at(4)); -#endif -#ifdef GTSAM_DT_MERGING expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(5)); #else + expectedAssignment = {{"A", 0}, {"B", 0}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(0)); + + expectedAssignment = {{"A", 1}, {"B", 0}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(1)); + + expectedAssignment = {{"A", 0}, {"B", 1}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(2)); + + expectedAssignment = {{"A", 1}, {"B", 1}, {"C", 0}}; + EXPECT(expectedAssignment == choices.at(3)); + + expectedAssignment = {{"A", 0}, {"B", 0}, {"C", 1}}; + EXPECT(expectedAssignment == choices.at(4)); + expectedAssignment = {{"A", 1}, {"B", 0}, {"C", 1}}; EXPECT(expectedAssignment == choices.at(5)); #endif diff --git a/gtsam/discrete/tests/testDiscreteConditional.cpp b/gtsam/discrete/tests/testDiscreteConditional.cpp index 397e7ff0c3..3b944b0550 100644 --- a/gtsam/discrete/tests/testDiscreteConditional.cpp +++ b/gtsam/discrete/tests/testDiscreteConditional.cpp @@ -30,7 +30,7 @@ using namespace std; using namespace gtsam; /* ************************************************************************* */ -TEST_DISABLED(DiscreteConditional, constructors) { 
+TEST(DiscreteConditional, constructors) { DiscreteKey X(0, 2), Y(2, 3), Z(1, 2); // watch ordering ! DiscreteConditional actual(X | Y = "1/1 2/3 1/4"); @@ -51,7 +51,7 @@ TEST_DISABLED(DiscreteConditional, constructors) { } /* ************************************************************************* */ -TEST_DISABLED(DiscreteConditional, constructors_alt_interface) { +TEST(DiscreteConditional, constructors_alt_interface) { DiscreteKey X(0, 2), Y(2, 3), Z(1, 2); // watch ordering ! const Signature::Row r1{1, 1}, r2{2, 3}, r3{1, 4}; @@ -70,7 +70,7 @@ TEST_DISABLED(DiscreteConditional, constructors_alt_interface) { } /* ************************************************************************* */ -TEST_DISABLED(DiscreteConditional, constructors2) { +TEST(DiscreteConditional, constructors2) { DiscreteKey C(0, 2), B(1, 2); Signature signature((C | B) = "4/1 3/1"); DiscreteConditional actual(signature); @@ -80,7 +80,7 @@ TEST_DISABLED(DiscreteConditional, constructors2) { } /* ************************************************************************* */ -TEST_DISABLED(DiscreteConditional, constructors3) { +TEST(DiscreteConditional, constructors3) { DiscreteKey C(0, 2), B(1, 2), A(2, 2); Signature signature((C | B, A) = "4/1 1/1 1/1 1/4"); DiscreteConditional actual(signature); @@ -91,7 +91,7 @@ TEST_DISABLED(DiscreteConditional, constructors3) { /* ****************************************************************************/ // Test evaluate for a discrete Prior P(Asia). -TEST_DISABLED(DiscreteConditional, PriorProbability) { +TEST(DiscreteConditional, PriorProbability) { constexpr Key asiaKey = 0; const DiscreteKey Asia(asiaKey, 2); DiscreteConditional dc(Asia, "4/6"); @@ -102,7 +102,7 @@ TEST_DISABLED(DiscreteConditional, PriorProbability) { /* ************************************************************************* */ // Check that error, logProbability, evaluate all work as expected. 
-TEST_DISABLED(DiscreteConditional, probability) { +TEST(DiscreteConditional, probability) { DiscreteKey C(2, 2), D(4, 2), E(3, 2); DiscreteConditional C_given_DE((C | D, E) = "4/1 1/1 1/1 1/4"); @@ -116,7 +116,7 @@ TEST_DISABLED(DiscreteConditional, probability) { /* ************************************************************************* */ // Check calculation of joint P(A,B) -TEST_DISABLED(DiscreteConditional, Multiply) { +TEST(DiscreteConditional, Multiply) { DiscreteKey A(1, 2), B(0, 2); DiscreteConditional conditional(A | B = "1/2 2/1"); DiscreteConditional prior(B % "1/2"); @@ -141,7 +141,7 @@ TEST_DISABLED(DiscreteConditional, Multiply) { /* ************************************************************************* */ // Check calculation of conditional joint P(A,B|C) -TEST_DISABLED(DiscreteConditional, Multiply2) { +TEST(DiscreteConditional, Multiply2) { DiscreteKey A(0, 2), B(1, 2), C(2, 2); DiscreteConditional A_given_B(A | B = "1/3 3/1"); DiscreteConditional B_given_C(B | C = "1/3 3/1"); @@ -161,7 +161,7 @@ TEST_DISABLED(DiscreteConditional, Multiply2) { /* ************************************************************************* */ // Check calculation of conditional joint P(A,B|C), double check keys -TEST_DISABLED(DiscreteConditional, Multiply3) { +TEST(DiscreteConditional, Multiply3) { DiscreteKey A(1, 2), B(2, 2), C(0, 2); // different keys!!! 
DiscreteConditional A_given_B(A | B = "1/3 3/1"); DiscreteConditional B_given_C(B | C = "1/3 3/1"); @@ -181,7 +181,7 @@ TEST_DISABLED(DiscreteConditional, Multiply3) { /* ************************************************************************* */ // Check calculation of conditional joint P(A,B,C|D,E) = P(A,B|D) P(C|D,E) -TEST_DISABLED(DiscreteConditional, Multiply4) { +TEST(DiscreteConditional, Multiply4) { DiscreteKey A(0, 2), B(1, 2), C(2, 2), D(4, 2), E(3, 2); DiscreteConditional A_given_B(A | B = "1/3 3/1"); DiscreteConditional B_given_D(B | D = "1/3 3/1"); @@ -205,7 +205,7 @@ TEST_DISABLED(DiscreteConditional, Multiply4) { /* ************************************************************************* */ // Check calculation of marginals for joint P(A,B) -TEST_DISABLED(DiscreteConditional, marginals) { +TEST(DiscreteConditional, marginals) { DiscreteKey A(1, 2), B(0, 2); DiscreteConditional conditional(A | B = "1/2 2/1"); DiscreteConditional prior(B % "1/2"); @@ -227,7 +227,7 @@ TEST_DISABLED(DiscreteConditional, marginals) { /* ************************************************************************* */ // Check calculation of marginals in case branches are pruned -TEST_DISABLED(DiscreteConditional, marginals2) { +TEST(DiscreteConditional, marginals2) { DiscreteKey A(0, 2), B(1, 2); // changing keys need to make pruning happen! 
DiscreteConditional conditional(A | B = "2/2 3/1"); DiscreteConditional prior(B % "1/2"); @@ -243,7 +243,7 @@ TEST_DISABLED(DiscreteConditional, marginals2) { } /* ************************************************************************* */ -TEST_DISABLED(DiscreteConditional, likelihood) { +TEST(DiscreteConditional, likelihood) { DiscreteKey X(0, 2), Y(1, 3); DiscreteConditional conditional(X | Y = "2/8 4/6 5/5"); @@ -258,7 +258,7 @@ TEST_DISABLED(DiscreteConditional, likelihood) { /* ************************************************************************* */ // Check choose on P(C|D,E) -TEST_DISABLED(DiscreteConditional, choose) { +TEST(DiscreteConditional, choose) { DiscreteKey C(2, 2), D(4, 2), E(3, 2); DiscreteConditional C_given_DE((C | D, E) = "4/1 1/1 1/1 1/4"); @@ -286,7 +286,7 @@ TEST_DISABLED(DiscreteConditional, choose) { /* ************************************************************************* */ // Check markdown representation looks as expected, no parents. -TEST_DISABLED(DiscreteConditional, markdown_prior) { +TEST(DiscreteConditional, markdown_prior) { DiscreteKey A(Symbol('x', 1), 3); DiscreteConditional conditional(A % "1/2/2"); string expected = @@ -302,7 +302,7 @@ TEST_DISABLED(DiscreteConditional, markdown_prior) { /* ************************************************************************* */ // Check markdown representation looks as expected, no parents + names. -TEST_DISABLED(DiscreteConditional, markdown_prior_names) { +TEST(DiscreteConditional, markdown_prior_names) { Symbol x1('x', 1); DiscreteKey A(x1, 3); DiscreteConditional conditional(A % "1/2/2"); @@ -320,7 +320,7 @@ TEST_DISABLED(DiscreteConditional, markdown_prior_names) { /* ************************************************************************* */ // Check markdown representation looks as expected, multivalued. 
-TEST_DISABLED(DiscreteConditional, markdown_multivalued) { +TEST(DiscreteConditional, markdown_multivalued) { DiscreteKey A(Symbol('a', 1), 3), B(Symbol('b', 1), 5); DiscreteConditional conditional( A | B = "2/88/10 2/20/78 33/33/34 33/33/34 95/2/3"); @@ -339,7 +339,7 @@ TEST_DISABLED(DiscreteConditional, markdown_multivalued) { /* ************************************************************************* */ // Check markdown representation looks as expected, two parents + names. -TEST_DISABLED(DiscreteConditional, markdown) { +TEST(DiscreteConditional, markdown) { DiscreteKey A(2, 2), B(1, 2), C(0, 3); DiscreteConditional conditional(A, {B, C}, "0/1 1/3 1/1 3/1 0/1 1/0"); string expected = @@ -362,7 +362,7 @@ TEST_DISABLED(DiscreteConditional, markdown) { /* ************************************************************************* */ // Check html representation looks as expected, two parents + names. -TEST_DISABLED(DiscreteConditional, html) { +TEST(DiscreteConditional, html) { DiscreteKey A(2, 2), B(1, 2), C(0, 3); DiscreteConditional conditional(A, {B, C}, "0/1 1/3 1/1 3/1 0/1 1/0"); string expected = From 4e9d8495957d337e585c1cae9563b1ad9e23317f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 28 Jul 2023 15:51:00 -0400 Subject: [PATCH 31/36] remove prints --- gtsam/hybrid/HybridBayesNet.cpp | 15 +-------------- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 3 +-- 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1a9a9dcc50..3bafe5a9c7 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -139,27 +139,16 @@ DecisionTreeFactor HybridBayesNet::pruneDiscreteConditionals( for (size_t i = 0; i < this->size(); i++) { auto conditional = this->at(i); if (conditional->isDiscrete()) { - std::cout << ">>>" << std::endl; - conditional->print(); discreteProbs = discreteProbs * (*conditional->asDiscrete()); - // discreteProbs.print(); - // std::cout << 
"================\n" << std::endl; Ordering conditional_keys(conditional->frontals()); discrete_frontals += conditional_keys; discrete_factor_idxs.push_back(i); } } - std::cout << "Original Joint Prob:" << std::endl; - std::cout << discreteProbs.nrAssignments() << std::endl; - discreteProbs.print(); + const DecisionTreeFactor prunedDiscreteProbs = discreteProbs.prune(maxNrLeaves); - std::cout << "Pruned Joint Prob:" << std::endl; - std::cout << prunedDiscreteProbs.nrAssignments() << std::endl; - prunedDiscreteProbs.print(); - std::cout << "\n\n\n"; - gttoc_(HybridBayesNet_PruneDiscreteConditionals); // Eliminate joint probability back into conditionals DiscreteFactorGraph dfg{prunedDiscreteProbs}; @@ -168,8 +157,6 @@ DecisionTreeFactor HybridBayesNet::pruneDiscreteConditionals( // Assign pruned discrete conditionals back at the correct indices. for (size_t i = 0; i < discrete_factor_idxs.size(); i++) { size_t idx = discrete_factor_idxs.at(i); - // std::cout << i << std::endl; - // dbn->at(i)->print(); this->at(idx) = std::make_shared(dbn->at(i)); } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index ee36ecc879..191e13d76a 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -96,7 +96,6 @@ static GaussianFactorGraphTree addGaussian( // TODO(dellaert): it's probably more efficient to first collect the discrete // keys, and then loop over all assignments to populate a vector. GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { - GaussianFactorGraphTree result; for (auto &f : factors_) { @@ -175,7 +174,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, throwRuntimeError("continuousElimination", f); } } - dfg.print("The DFG to eliminate"); + // NOTE: This does sum-product. For max-product, use EliminateForMPE. 
auto result = EliminateDiscrete(dfg, frontalKeys); From 4580c510ea384b5897bf3b5d32fcdef80fae49d8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 28 Jul 2023 15:51:59 -0400 Subject: [PATCH 32/36] undo change --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 191e13d76a..7a7ca0cbf7 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -96,6 +96,7 @@ static GaussianFactorGraphTree addGaussian( // TODO(dellaert): it's probably more efficient to first collect the discrete // keys, and then loop over all assignments to populate a vector. GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { + GaussianFactorGraphTree result; for (auto &f : factors_) { From 8cb33dd4f89414f262c38d1d3dd851894115a6f6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 29 Jul 2023 10:10:57 -0400 Subject: [PATCH 33/36] remove make_unique flag --- gtsam/discrete/DecisionTree-inl.h | 19 ++++++------------- gtsam/discrete/DecisionTree.h | 8 +++----- 2 files changed, 9 insertions(+), 18 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 541bd77f42..413b4a9247 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -149,8 +149,7 @@ namespace gtsam { } /** choose a branch, create new memory ! 
*/ - NodePtr choose(const L& label, size_t index, - bool make_unique = true) const override { + NodePtr choose(const L& label, size_t index) const override { return NodePtr(new Leaf(constant(), 1)); } @@ -469,22 +468,16 @@ namespace gtsam { } /** choose a branch, recursively */ - NodePtr choose(const L& label, size_t index, - bool make_unique = true) const override { + NodePtr choose(const L& label, size_t index) const override { if (label_ == label) return branches_[index]; // choose branch // second case, not label of interest, just recurse auto r = std::make_shared(label_, branches_.size()); for (auto&& branch : branches_) { - r->push_back(branch->choose(label, index, make_unique)); + r->push_back(branch->choose(label, index)); } - if (make_unique) { - return Unique(r); - } else { - return r; - } - // return Unique(r); + return Unique(r); } private: @@ -1006,9 +999,9 @@ namespace gtsam { template DecisionTree DecisionTree::combine(const L& label, size_t cardinality, const Binary& op) const { - DecisionTree result = choose(label, 0, false); + DecisionTree result = choose(label, 0); for (size_t index = 1; index < cardinality; index++) { - DecisionTree chosen = choose(label, index, false); + DecisionTree chosen = choose(label, index); result = result.apply(chosen, op); } return result; diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 7761de84e0..48a1a45961 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -129,8 +129,7 @@ namespace gtsam { virtual Ptr apply_f_op_g(const Node&, const Binary&) const = 0; virtual Ptr apply_g_op_fL(const Leaf&, const Binary&) const = 0; virtual Ptr apply_g_op_fC(const Choice&, const Binary&) const = 0; - virtual Ptr choose(const L& label, size_t index, - bool make_unique = true) const = 0; + virtual Ptr choose(const L& label, size_t index) const = 0; virtual bool isLeaf() const = 0; private: @@ -404,9 +403,8 @@ namespace gtsam { /** create a new function where 
value(label)==index * It's like "restrict" in Darwiche09book pg329, 330? */ - DecisionTree choose(const L& label, size_t index, - bool make_unique = true) const { - NodePtr newRoot = root_->choose(label, index, make_unique); + DecisionTree choose(const L& label, size_t index) const { + NodePtr newRoot = root_->choose(label, index); return DecisionTree(newRoot); } From 94d737e1db7aadef5751f1ad93e2f7ae3b1aea2c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 29 Jul 2023 10:13:56 -0400 Subject: [PATCH 34/36] remove printing --- gtsam/discrete/DiscreteFactorGraph.cpp | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/gtsam/discrete/DiscreteFactorGraph.cpp b/gtsam/discrete/DiscreteFactorGraph.cpp index e0d2ed4de7..4ededbb8be 100644 --- a/gtsam/discrete/DiscreteFactorGraph.cpp +++ b/gtsam/discrete/DiscreteFactorGraph.cpp @@ -204,18 +204,12 @@ namespace gtsam { std::pair // EliminateDiscrete(const DiscreteFactorGraph& factors, const Ordering& frontalKeys) { - factors.print("The Factors to eliminate:"); // PRODUCT: multiply all factors gttic(product); DecisionTreeFactor product; for (auto&& factor : factors) product = (*factor) * product; gttoc(product); - std::cout << "\n\n==========" << std::endl; - std::cout << "Product" << std::endl; - std::cout << std::endl; - product.print(); - // Max over all the potentials by pretending all keys are frontal: auto normalization = product.max(product.size()); @@ -227,10 +221,6 @@ namespace gtsam { DecisionTreeFactor::shared_ptr sum = product.sum(frontalKeys); gttoc(sum); - std::cout << "\n->Sum" << std::endl; - sum->print(); - std::cout << "----------------------" << std::endl; - // Ordering keys for the conditional so that frontalKeys are really in front Ordering orderedKeys; orderedKeys.insert(orderedKeys.end(), frontalKeys.begin(), From 4386c5114c82ea4ce68030186c8a4bce83ea42ae Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 6 Nov 2023 09:36:30 -0500 Subject: [PATCH 35/36] remove nrAssignments from 
DecisionTree --- gtsam/discrete/DecisionTree-inl.h | 47 ++------ gtsam/discrete/DecisionTree.h | 36 ------ gtsam/discrete/tests/testDecisionTree.cpp | 85 ------------- .../tests/testDiscreteConditional.cpp | 66 ----------- .../tests/testDiscreteFactorGraph.cpp | 112 ------------------ .../tests/testGaussianMixtureFactor.cpp | 4 +- .../tests/testHybridNonlinearFactorGraph.cpp | 58 ++++----- gtsam/hybrid/tests/testMixtureFactor.cpp | 4 +- 8 files changed, 41 insertions(+), 371 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index 413b4a9247..06161c2e15 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -53,26 +53,17 @@ namespace gtsam { /** constant stored in this leaf */ Y constant_; - /** The number of assignments contained within this leaf. - * Particularly useful when leaves have been pruned. - */ - size_t nrAssignments_; - /// Default constructor for serialization. Leaf() {} /// Constructor from constant - Leaf(const Y& constant, size_t nrAssignments = 1) - : constant_(constant), nrAssignments_(nrAssignments) {} + Leaf(const Y& constant) : constant_(constant) {} /// Return the constant const Y& constant() const { return constant_; } - /// Return the number of assignments contained within this leaf. - size_t nrAssignments() const { return nrAssignments_; } - /// Leaf-Leaf equality bool sameLeaf(const Leaf& q) const override { return constant_ == q.constant_; @@ -93,8 +84,7 @@ namespace gtsam { /// print void print(const std::string& s, const LabelFormatter& labelFormatter, const ValueFormatter& valueFormatter) const override { - std::cout << s << " Leaf [" << nrAssignments() << "] " - << valueFormatter(constant_) << std::endl; + std::cout << s << " Leaf " << valueFormatter(constant_) << std::endl; } /** Write graphviz format to stream `os`. 
*/ @@ -114,14 +104,14 @@ namespace gtsam { /** apply unary operator */ NodePtr apply(const Unary& op) const override { - NodePtr f(new Leaf(op(constant_), nrAssignments_)); + NodePtr f(new Leaf(op(constant_))); return f; } /// Apply unary operator with assignment NodePtr apply(const UnaryAssignment& op, const Assignment& assignment) const override { - NodePtr f(new Leaf(op(assignment, constant_), nrAssignments_)); + NodePtr f(new Leaf(op(assignment, constant_))); return f; } @@ -137,9 +127,7 @@ namespace gtsam { // Applying binary operator to two leaves results in a leaf NodePtr apply_g_op_fL(const Leaf& fL, const Binary& op) const override { // fL op gL - // The nrAssignments is always set to fL since we consider g operating on - // (or modifying) f. - NodePtr h(new Leaf(op(fL.constant_, constant_), fL.nrAssignments())); + NodePtr h(new Leaf(op(fL.constant_, constant_))); return h; } @@ -150,7 +138,7 @@ namespace gtsam { /** choose a branch, create new memory ! */ NodePtr choose(const L& label, size_t index) const override { - return NodePtr(new Leaf(constant(), 1)); + return NodePtr(new Leaf(constant())); } bool isLeaf() const override { return true; } @@ -165,7 +153,6 @@ namespace gtsam { void serialize(ARCHIVE& ar, const unsigned int /*version*/) { ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); ar& BOOST_SERIALIZATION_NVP(constant_); - ar& BOOST_SERIALIZATION_NVP(nrAssignments_); } #endif }; // Leaf @@ -235,16 +222,8 @@ namespace gtsam { assert(f->branches().size() > 0); NodePtr f0 = f->branches_[0]; - // Compute total number of assignments - size_t nrAssignments = 0; - for (auto branch : f->branches()) { - if (auto leaf = std::dynamic_pointer_cast(branch)) { - nrAssignments += leaf->nrAssignments(); - } - } NodePtr newLeaf( - new Leaf(std::dynamic_pointer_cast(f0)->constant(), - nrAssignments)); + new Leaf(std::dynamic_pointer_cast(f0)->constant())); return newLeaf; } #endif @@ -732,7 +711,7 @@ namespace gtsam { // If leaf, apply unary conversion "op" and 
create a unique leaf. using MXLeaf = typename DecisionTree::Leaf; if (auto leaf = std::dynamic_pointer_cast(f)) { - return NodePtr(new Leaf(Y_of_X(leaf->constant()), leaf->nrAssignments())); + return NodePtr(new Leaf(Y_of_X(leaf->constant()))); } // Check if Choice @@ -879,16 +858,6 @@ namespace gtsam { return total; } - /****************************************************************************/ - template - size_t DecisionTree::nrAssignments() const { - size_t n = 0; - this->visitLeaf([&n](const DecisionTree::Leaf& leaf) { - n += leaf.nrAssignments(); - }); - return n; - } - /****************************************************************************/ // fold is just done with a visit template diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 48a1a45961..f1ad6e5534 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -328,42 +328,6 @@ namespace gtsam { /// Return the number of leaves in the tree. size_t nrLeaves() const; - /** - * @brief This is a convenience function which returns the total number of - * leaf assignments in the decision tree. - * This function is not used for anymajor operations within the discrete - * factor graph framework. - * - * Leaf assignments represent the cardinality of each leaf node, e.g. in a - * binary tree each leaf has 2 assignments. This includes counts removed - * from implicit pruning hence, it will always be >= nrLeaves(). - * - * E.g. we have a decision tree as below, where each node has 2 branches: - * - * Choice(m1) - * 0 Choice(m0) - * 0 0 Leaf 0.0 - * 0 1 Leaf 0.0 - * 1 Choice(m0) - * 1 0 Leaf 1.0 - * 1 1 Leaf 2.0 - * - * In the unpruned form, the tree will have 4 assignments, 2 for each key, - * and 4 leaves. 
- * - * In the pruned form, the number of assignments is still 4 but the number - * of leaves is now 3, as below: - * - * Choice(m1) - * 0 Leaf 0.0 - * 1 Choice(m0) - * 1 0 Leaf 1.0 - * 1 1 Leaf 2.0 - * - * @return size_t - */ - size_t nrAssignments() const; - /** * @brief Fold a binary function over the tree, returning accumulator. * diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 54e5b3de97..c625e1ba63 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -370,59 +370,6 @@ TEST(DecisionTree, Containers) { StringContainerTree converted(stringIntTree, container_of_int); } -/* ************************************************************************** */ -// Test nrAssignments. -TEST(DecisionTree, NrAssignments) { - const std::pair A("A", 2), B("B", 2), C("C", 2); - DT tree({A, B, C}, "1 1 1 1 1 1 1 1"); - - EXPECT_LONGS_EQUAL(8, tree.nrAssignments()); - -#ifdef GTSAM_DT_MERGING - EXPECT(tree.root_->isLeaf()); - auto leaf = std::dynamic_pointer_cast(tree.root_); - EXPECT_LONGS_EQUAL(8, leaf->nrAssignments()); -#endif - - DT tree2({C, B, A}, "1 1 1 2 3 4 5 5"); - /* The tree is - Choice(C) - 0 Choice(B) - 0 0 Leaf 1 - 0 1 Choice(A) - 0 1 0 Leaf 1 - 0 1 1 Leaf 2 - 1 Choice(B) - 1 0 Choice(A) - 1 0 0 Leaf 3 - 1 0 1 Leaf 4 - 1 1 Leaf 5 - */ - - EXPECT_LONGS_EQUAL(8, tree2.nrAssignments()); - - auto root = std::dynamic_pointer_cast(tree2.root_); - CHECK(root); - auto choice0 = std::dynamic_pointer_cast(root->branches()[0]); - CHECK(choice0); - -#ifdef GTSAM_DT_MERGING - EXPECT(choice0->branches()[0]->isLeaf()); - auto choice00 = std::dynamic_pointer_cast(choice0->branches()[0]); - CHECK(choice00); - EXPECT_LONGS_EQUAL(2, choice00->nrAssignments()); - - auto choice1 = std::dynamic_pointer_cast(root->branches()[1]); - CHECK(choice1); - auto choice10 = std::dynamic_pointer_cast(choice1->branches()[0]); - CHECK(choice10); - auto choice11 = 
std::dynamic_pointer_cast(choice1->branches()[1]); - CHECK(choice11); - EXPECT(choice11->isLeaf()); - EXPECT_LONGS_EQUAL(2, choice11->nrAssignments()); -#endif -} - /* ************************************************************************** */ // Test visit. TEST(DecisionTree, visit) { @@ -625,38 +572,6 @@ TEST(DecisionTree, ApplyWithAssignment) { #endif } -/* ************************************************************************** */ -// Test number of assignments. -TEST(DecisionTree, NrAssignments2) { - using gtsam::symbol_shorthand::M; - - std::vector probs = {0, 0, 1, 2}; - - /* Create the decision tree - Choice(m1) - 0 Leaf 0.000000 - 1 Choice(m0) - 1 0 Leaf 1.000000 - 1 1 Leaf 2.000000 - */ - DiscreteKeys keys{{M(1), 2}, {M(0), 2}}; - DecisionTree dt1(keys, probs); - EXPECT_LONGS_EQUAL(4, dt1.nrAssignments()); - - /* Create the DecisionTree - Choice(m1) - 0 Choice(m0) - 0 0 Leaf 0.000000 - 0 1 Leaf 1.000000 - 1 Choice(m0) - 1 0 Leaf 0.000000 - 1 1 Leaf 2.000000 - */ - DiscreteKeys keys2{{M(0), 2}, {M(1), 2}}; - DecisionTree dt2(keys2, probs); - EXPECT_LONGS_EQUAL(4, dt2.nrAssignments()); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testDiscreteConditional.cpp b/gtsam/discrete/tests/testDiscreteConditional.cpp index 3b944b0550..dae2797134 100644 --- a/gtsam/discrete/tests/testDiscreteConditional.cpp +++ b/gtsam/discrete/tests/testDiscreteConditional.cpp @@ -390,72 +390,6 @@ TEST(DiscreteConditional, html) { EXPECT(actual == expected); } -/* ************************************************************************* */ -TEST(DiscreteConditional, NrAssignments) { -#ifdef GTSAM_DT_MERGING - string expected = R"( P( 0 1 2 ): - Choice(2) - 0 Choice(1) - 0 0 Leaf [2] 0 - 0 1 Choice(0) - 0 1 0 Leaf [1] 0.27527634 - 0 1 1 Leaf [1] 0 - 1 Choice(1) - 1 0 Leaf [2] 0 - 1 1 Choice(0) - 1 1 0 Leaf [1] 0.44944733 - 1 1 1 Leaf [1] 0.27527634 - -)"; -#else - string expected 
= R"( P( 0 1 2 ): - Choice(2) - 0 Choice(1) - 0 0 Choice(0) - 0 0 0 Leaf [1] 0 - 0 0 1 Leaf [1] 0 - 0 1 Choice(0) - 0 1 0 Leaf [1] 0.27527634 - 0 1 1 Leaf [1] 0.44944733 - 1 Choice(1) - 1 0 Choice(0) - 1 0 0 Leaf [1] 0 - 1 0 1 Leaf [1] 0 - 1 1 Choice(0) - 1 1 0 Leaf [1] 0 - 1 1 1 Leaf [1] 0.27527634 - -)"; -#endif - - DiscreteKeys d0{{0, 2}, {1, 2}, {2, 2}}; - std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; - AlgebraicDecisionTree dt(d0, p0); - DecisionTreeFactor dtf(d0, dt); - DiscreteConditional f0(3, dtf); - - EXPECT(assert_print_equal(expected, f0)); - - DiscreteFactorGraph dfg{f0}; - dfg.print(); - auto dbn = dfg.eliminateSequential(); - dbn->print(); - - // DiscreteKeys d0{{0, 2}, {1, 2}}; - // std::vector p0 = {0, 1, 0, 2}; - // AlgebraicDecisionTree dt0(d0, p0); - // dt0.print("", DefaultKeyFormatter); - - // DiscreteKeys d1{{0, 2}}; - // std::vector p1 = {1, 1, 1, 1}; - // AlgebraicDecisionTree dt1(d0, p1); - // dt1.print("", DefaultKeyFormatter); - - // auto dd = dt0 / dt1; - // dd.print("", DefaultKeyFormatter); -} - - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index 871583a2cc..341eb63e38 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -349,118 +349,6 @@ TEST(DiscreteFactorGraph, markdown) { EXPECT_DOUBLES_EQUAL(0.3, graph[0]->operator()(values), 1e-9); } -/* ************************************************************************* */ -TEST(DiscreteFactorGraph, NrAssignments) { -#ifdef GTSAM_DT_MERGING - string expected_dfg = R"( -size: 2 -factor 0: f[ (m0,2), (m1,2), (m2,2), ] - Choice(m2) - 0 Choice(m1) - 0 0 Leaf [2] 0 - 0 1 Choice(m0) - 0 1 0 Leaf [1] 0.17054468 - 0 1 1 Leaf [1] 0 - 1 Choice(m1) - 1 0 Leaf [2] 0 - 1 1 Choice(m0) - 1 1 0 Leaf [1] 0.27845056 - 1 1 1 Leaf [1] 
0.17054468 -factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] - Choice(m3) - 0 Choice(m2) - 0 0 Choice(m1) - 0 0 0 Leaf [2] 1 - 0 0 1 Leaf [2] 0.015366387 - 0 1 Choice(m1) - 0 1 0 Leaf [2] 1 - 0 1 1 Choice(m0) - 0 1 1 0 Leaf [1] 1 - 0 1 1 1 Leaf [1] 0.015365663 - 1 Choice(m2) - 1 0 Choice(m1) - 1 0 0 Leaf [2] 1 - 1 0 1 Choice(m0) - 1 0 1 0 Leaf [1] 0.0094115739 - 1 0 1 1 Leaf [1] 0.0094115652 - 1 1 Choice(m1) - 1 1 0 Leaf [2] 1 - 1 1 1 Choice(m0) - 1 1 1 0 Leaf [1] 1 - 1 1 1 1 Leaf [1] 0.009321081 -)"; -#else - string expected_dfg = R"( -size: 2 -factor 0: f[ (m0,2), (m1,2), (m2,2), ] - Choice(m2) - 0 Choice(m1) - 0 0 Choice(m0) - 0 0 0 Leaf [1] 0 - 0 0 1 Leaf [1] 0 - 0 1 Choice(m0) - 0 1 0 Leaf [1] 0.27527634 - 0 1 1 Leaf [1] 0.44944733 - 1 Choice(m1) - 1 0 Choice(m0) - 1 0 0 Leaf [1] 0 - 1 0 1 Leaf [1] 0 - 1 1 Choice(m0) - 1 1 0 Leaf [1] 0 - 1 1 1 Leaf [1] 0.27527634 -factor 1: f[ (m0,2), (m1,2), (m2,2), (m3,2), ] - Choice(m3) - 0 Choice(m2) - 0 0 Choice(m1) - 0 0 0 Choice(m0) - 0 0 0 0 Leaf [1] 1 - 0 0 0 1 Leaf [1] 1 - 0 0 1 Choice(m0) - 0 0 1 0 Leaf [1] 0.015366387 - 0 0 1 1 Leaf [1] 0.015366387 - 0 1 Choice(m1) - 0 1 0 Choice(m0) - 0 1 0 0 Leaf [1] 1 - 0 1 0 1 Leaf [1] 1 - 0 1 1 Choice(m0) - 0 1 1 0 Leaf [1] 1 - 0 1 1 1 Leaf [1] 0.015365663 - 1 Choice(m2) - 1 0 Choice(m1) - 1 0 0 Choice(m0) - 1 0 0 0 Leaf [1] 1 - 1 0 0 1 Leaf [1] 1 - 1 0 1 Choice(m0) - 1 0 1 0 Leaf [1] 0.0094115739 - 1 0 1 1 Leaf [1] 0.0094115652 - 1 1 Choice(m1) - 1 1 0 Choice(m0) - 1 1 0 0 Leaf [1] 1 - 1 1 0 1 Leaf [1] 1 - 1 1 1 Choice(m0) - 1 1 1 0 Leaf [1] 1 - 1 1 1 1 Leaf [1] 0.009321081 -)"; -#endif - - DiscreteKeys d0{{M(0), 2}, {M(1), 2}, {M(2), 2}}; - std::vector p0 = {0, 0, 0.17054468, 0.27845056, 0, 0, 0, 0.17054468}; - AlgebraicDecisionTree dt(d0, p0); - DiscreteConditional f0(3, d0, dt); - - DiscreteKeys d1{{M(0), 2}, {M(1), 2}, {M(2), 2}, {M(3), 2}}; - std::vector p1 = { - 1, 1, 1, 1, 0.015366387, 0.0094115739, 1, 1, - 1, 1, 1, 1, 0.015366387, 0.0094115652, 0.015365663, 
0.009321081}; - DecisionTreeFactor f1(d1, p1); - - DiscreteFactorGraph dfg; - dfg.add(f0); - dfg.add(f1); - - EXPECT(assert_print_equal(expected_dfg, dfg)); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 5492234979..75ba5a0594 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -108,7 +108,7 @@ TEST(GaussianMixtureFactor, Printing) { std::string expected = R"(Hybrid [x1 x2; 1]{ Choice(1) - 0 Leaf [1] : + 0 Leaf : A[x1] = [ 0; 0 @@ -120,7 +120,7 @@ TEST(GaussianMixtureFactor, Printing) { b = [ 0 0 ] No noise model - 1 Leaf [1] : + 1 Leaf : A[x1] = [ 0; 0 diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 379eff27b6..a493de5c5f 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -493,7 +493,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf [1] : + 0 Leaf : A[x0] = [ -1 ] @@ -503,7 +503,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf [1] : + 1 Leaf : A[x0] = [ -1 ] @@ -517,7 +517,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 x2; m1]{ Choice(m1) - 0 Leaf [1] : + 0 Leaf : A[x1] = [ -1 ] @@ -527,7 +527,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf [1] : + 1 Leaf : A[x1] = [ -1 ] @@ -551,16 +551,16 @@ factor 4: b = [ -10 ] No noise model factor 5: P( m0 ): - Leaf [2] 0.5 + Leaf 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf [1] 0.33333333 - 0 1 Leaf [1] 0.6 + 0 0 Leaf 0.33333333 + 0 1 Leaf 0.6 1 Choice(m0) - 1 0 Leaf [1] 0.66666667 - 1 1 Leaf [1] 0.4 + 1 0 Leaf 0.66666667 + 1 1 Leaf 0.4 )"; #else @@ -575,7 +575,7 @@ factor 0: factor 1: Hybrid [x0 x1; m0]{ Choice(m0) - 0 Leaf [1]: + 0 Leaf: A[x0] = [ -1 ] @@ -585,7 
+585,7 @@ Hybrid [x0 x1; m0]{ b = [ -1 ] No noise model - 1 Leaf [1]: + 1 Leaf: A[x0] = [ -1 ] @@ -599,7 +599,7 @@ Hybrid [x0 x1; m0]{ factor 2: Hybrid [x1 x2; m1]{ Choice(m1) - 0 Leaf [1]: + 0 Leaf: A[x1] = [ -1 ] @@ -609,7 +609,7 @@ Hybrid [x1 x2; m1]{ b = [ -1 ] No noise model - 1 Leaf [1]: + 1 Leaf: A[x1] = [ -1 ] @@ -634,17 +634,17 @@ factor 4: No noise model factor 5: P( m0 ): Choice(m0) - 0 Leaf [1] 0.5 - 1 Leaf [1] 0.5 + 0 Leaf 0.5 + 1 Leaf 0.5 factor 6: P( m1 | m0 ): Choice(m1) 0 Choice(m0) - 0 0 Leaf [1]0.33333333 - 0 1 Leaf [1] 0.6 + 0 0 Leaf 0.33333333 + 0 1 Leaf 0.6 1 Choice(m0) - 1 0 Leaf [1]0.66666667 - 1 1 Leaf [1] 0.4 + 1 0 Leaf 0.66666667 + 1 1 Leaf 0.4 )"; #endif @@ -657,13 +657,13 @@ size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), Choice(m0) - 0 Leaf [1] p(x0 | x1) + 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] No noise model - 1 Leaf [1] p(x0 | x1) + 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] @@ -673,26 +673,26 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf [1] p(x1 | x2) + 0 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] No noise model - 0 1 Leaf [1] p(x1 | x2) + 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] No noise model 1 Choice(m0) - 1 0 Leaf [1] p(x1 | x2) + 1 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] No noise model - 1 1 Leaf [1] p(x1 | x2) + 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] @@ -702,14 +702,14 @@ conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) - 0 0 Leaf [1] p(x2) + 0 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1489 ] mean: 1 elements x2: -1.0099 No noise model - 0 1 Leaf [1] p(x2) + 0 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1479 ] mean: 1 elements @@ -717,14 +717,14 @@ conditional 2: Hybrid P( x2 | m0 m1) No noise model 1 Choice(m0) 
- 1 0 Leaf [1] p(x2) + 1 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0504 ] mean: 1 elements x2: -1.0001 No noise model - 1 1 Leaf [1] p(x2) + 1 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0494 ] mean: 1 elements diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 03fdccff26..67a7fd8ae1 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -63,8 +63,8 @@ TEST(MixtureFactor, Printing) { R"(Hybrid [x1 x2; 1] MixtureFactor Choice(1) - 0 Leaf [1] Nonlinear factor on 2 keys - 1 Leaf [1] Nonlinear factor on 2 keys + 0 Leaf Nonlinear factor on 2 keys + 1 Leaf Nonlinear factor on 2 keys )"; EXPECT(assert_print_equal(expected, mixtureFactor)); } From c4d11c498c75213c1dfb8cb70330e174d4f65b9c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 6 Nov 2023 11:05:23 -0500 Subject: [PATCH 36/36] fix unittest assertion deprecation --- python/gtsam/tests/test_Robust.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/python/gtsam/tests/test_Robust.py b/python/gtsam/tests/test_Robust.py index c2ab9d4a47..1d7e9f52d4 100644 --- a/python/gtsam/tests/test_Robust.py +++ b/python/gtsam/tests/test_Robust.py @@ -10,10 +10,11 @@ """ import unittest -import gtsam import numpy as np from gtsam.utils.test_case import GtsamTestCase +import gtsam + class TestRobust(GtsamTestCase): @@ -37,7 +38,7 @@ def custom_loss(e): v = gtsam.Values() v.insert(0, 0.0) - self.assertAlmostEquals(f.error(v), 0.125) + self.assertAlmostEqual(f.error(v), 0.125) if __name__ == "__main__": unittest.main()