From 16b27f9c7c6ab4d02f7d90ef2632229c305f7cc0 Mon Sep 17 00:00:00 2001
From: "szewczyk.franciszek02"
Date: Wed, 8 Nov 2023 17:58:01 +0100
Subject: [PATCH 1/4] Constant Iterator of vector

---
 include/core/Vector.hpp       | 39 +++++++++++++++++++++++++++++++++++
 include/nn/layers/Dropout.hpp |  4 ++--
 2 files changed, 41 insertions(+), 2 deletions(-)

diff --git a/include/core/Vector.hpp b/include/core/Vector.hpp
index b13783a..fdaa4e2 100644
--- a/include/core/Vector.hpp
+++ b/include/core/Vector.hpp
@@ -15,6 +15,7 @@
 namespace shkyera {
 
 template <typename T> class Vector;
+
 using Vec32 = Vector<Type::float32>;
 using Vec64 = Vector<Type::float64>;
 
@@ -43,6 +44,21 @@ template <typename T> class Vector {
     Vector<T> &operator*=(ValuePtr<T> val);
 
     ValuePtr<T> operator[](size_t index) const;
+
+    class ConstIterator {
+      private:
+        size_t _index;
+        const Vector<T> &_vector;
+
+      public:
+        ConstIterator(size_t index, const Vector<T> &vector);
+        const ValuePtr<T> operator*();
+        ConstIterator &operator++();
+        bool operator!=(const ConstIterator &other);
+    };
+
+    ConstIterator begin() const;
+    ConstIterator end() const;
 };
 
 template <typename T> Vector<T>::Vector(std::vector<ValuePtr<T>> values) { _values = values; }
@@ -141,4 +157,27 @@ template <typename T> std::ostream &operator<<(std::ostream &os, const Vector<T
     return os;
 }
 
+template <typename T> typename Vector<T>::ConstIterator Vector<T>::begin() const { return ConstIterator(0, *this); }
+template <typename T> typename Vector<T>::ConstIterator Vector<T>::end() const { return ConstIterator(size(), *this); }
+
+template <typename T>
+Vector<T>::ConstIterator::ConstIterator(size_t index, const Vector<T> &vector) : _index(index), _vector(vector) {}
+
+template <typename T> const ValuePtr<T> Vector<T>::ConstIterator::operator*() {
+    if (_index < _vector.size()) {
+        return _vector[_index];
+    }
+    throw std::out_of_range("Vector iterator out of range. Tried to access index " + std::to_string(_index) +
+                            " in a Vector of size " + std::to_string(_vector.size()) + ".");
+}
+
+template <typename T> typename Vector<T>::ConstIterator &Vector<T>::ConstIterator::operator++() {
+    ++_index;
+    return *this;
+}
+
+template <typename T> bool Vector<T>::ConstIterator::operator!=(const ConstIterator &other) {
+    return _index != other._index;
+}
+
 } // namespace shkyera

diff --git a/include/nn/layers/Dropout.hpp b/include/nn/layers/Dropout.hpp
index c3a6063..be3966a 100644
--- a/include/nn/layers/Dropout.hpp
+++ b/include/nn/layers/Dropout.hpp
@@ -45,8 +45,8 @@ template <typename T> DropoutPtr<T> Dropout<T>::create(size_t input, size_t size
 template <typename T> Vector<T> Dropout<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> alteredInput;
     alteredInput.reserve(x.size());
-    for (size_t i = 0; i < x.size(); ++i)
-        alteredInput.push_back(x[i]);
+    for (const ValuePtr<T> &val : x)
+        alteredInput.push_back(val);
 
     std::vector<size_t> indicesToRemove = utils::sample<size_t>(0, x.size() - 1, _dropout * x.size(), false);
     for (size_t idxToRemove : indicesToRemove)

From 006f3f157893545e5bd5cc0a256d84961cd57a7e Mon Sep 17 00:00:00 2001
From: "szewczyk.franciszek02"
Date: Wed, 8 Nov 2023 18:08:57 +0100
Subject: [PATCH 2/4] Iterating while printing

---
 include/core/Vector.hpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/include/core/Vector.hpp b/include/core/Vector.hpp
index fdaa4e2..e730cb9 100644
--- a/include/core/Vector.hpp
+++ b/include/core/Vector.hpp
@@ -150,7 +150,7 @@ template <typename T> ValuePtr<T> Vector<T>::operator[](size_t index) const { re
 
 template <typename T> std::ostream &operator<<(std::ostream &os, const Vector<T> &vector) {
     os << "Vector(size=" << vector.size() << ", data={";
-    for (const ValuePtr<T> val : vector._values)
+    for (auto val : vector)
         os << val << ' ';
     os << "})";
 
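The two patches above are what make a range-based for over a Vector legal: begin() and end() hand out a ConstIterator, operator++ advances an index, operator!= compares indices, and operator* bounds-checks before delegating to operator[]. A minimal usage sketch follows, assuming the Vec32/Val32 aliases and the std::vector<ValuePtr<T>> constructor visible in the diffs; the sample data is hypothetical, not repository code.

    #include <iostream>
    #include <vector>
    #include "core/Vector.hpp"

    using namespace shkyera;

    int main() {
        // Hypothetical input, built with the factory and constructor shown in the diffs.
        Vec32 v(std::vector<ValuePtr<Type::float32>>{Val32::create(1), Val32::create(2), Val32::create(3)});

        for (auto val : v)           // compiles thanks to the new ConstIterator
            std::cout << val << ' '; // ValuePtr is streamable, as operator<< already relies on
        std::cout << '\n';
    }

One design choice worth noting: dereferencing past the end throws std::out_of_range rather than being undefined behavior, so a malformed loop fails loudly instead of silently reading garbage.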
"szewczyk.franciszek02" Date: Wed, 8 Nov 2023 18:31:45 +0100 Subject: [PATCH 3/4] More iterators! --- examples/xor_classification.cpp | 4 ++-- include/nn/activation/Exp.hpp | 5 ++--- include/nn/activation/ReLU.hpp | 5 ++--- include/nn/activation/Sigmoid.hpp | 4 ++-- include/nn/activation/Softmax.hpp | 10 +++++----- include/nn/activation/Tanh.hpp | 5 ++--- 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/examples/xor_classification.cpp b/examples/xor_classification.cpp index d4961c5..e8cd162 100644 --- a/examples/xor_classification.cpp +++ b/examples/xor_classification.cpp @@ -16,7 +16,7 @@ int main() { auto mlp = SequentialBuilder::begin() .add(Linear32::create(2, 15)) .add(ReLU32::create()) - .add(Dropout32::create(15, 5, 0.2)) + .add(Dropout32::create(15, 5, 0.3)) .add(Tanh32::create()) .add(Linear32::create(5, 2)) .add(Softmax32::create()) @@ -27,7 +27,7 @@ int main() { Loss::Function32 lossFunction = Loss::CrossEntropy; // ------ TRAINING THE NETWORK ------- // - for (size_t epoch = 0; epoch < 200; epoch++) { + for (size_t epoch = 0; epoch < 500; epoch++) { auto epochLoss = Val32::create(0); optimizer.reset(); diff --git a/include/nn/activation/Exp.hpp b/include/nn/activation/Exp.hpp index 8ec7a43..15521cf 100644 --- a/include/nn/activation/Exp.hpp +++ b/include/nn/activation/Exp.hpp @@ -28,9 +28,8 @@ template Vector Exp::operator()(const Vector &x) const { std::vector> out; out.reserve(x.size()); - for (size_t i = 0; i < x.size(); ++i) { - out.emplace_back(x[i]->exp()); - } + for (auto entry : x) + out.emplace_back(entry->exp()); return Vector(out); } diff --git a/include/nn/activation/ReLU.hpp b/include/nn/activation/ReLU.hpp index b1757c7..dfdce01 100644 --- a/include/nn/activation/ReLU.hpp +++ b/include/nn/activation/ReLU.hpp @@ -28,9 +28,8 @@ template Vector ReLU::operator()(const Vector &x) const { std::vector> out; out.reserve(x.size()); - for (size_t i = 0; i < x.size(); ++i) { - out.emplace_back(x[i]->relu()); - } + for (auto entry : x) + out.emplace_back(entry->relu()); return Vector(out); } diff --git a/include/nn/activation/Sigmoid.hpp b/include/nn/activation/Sigmoid.hpp index 8648cfe..6679421 100644 --- a/include/nn/activation/Sigmoid.hpp +++ b/include/nn/activation/Sigmoid.hpp @@ -30,8 +30,8 @@ template Vector Sigmoid::operator()(const Vector &x) const std::vector> out; out.reserve(x.size()); - for (size_t i = 0; i < x.size(); ++i) { - out.emplace_back(x[i]->sigmoid()); + for (auto entry : x) { + out.emplace_back(entry->sigmoid()); } return Vector(out); diff --git a/include/nn/activation/Softmax.hpp b/include/nn/activation/Softmax.hpp index 6f80c09..eb581b7 100644 --- a/include/nn/activation/Softmax.hpp +++ b/include/nn/activation/Softmax.hpp @@ -31,13 +31,13 @@ template Vector Softmax::operator()(const Vector &x) const out.reserve(x.size()); auto maxValue = Value::create(x[0]->getValue()); - for (size_t i = 1; i < x.size(); ++i) - if (x[i] > maxValue) - maxValue = x[i]; + for (auto entry : x) + if (entry > maxValue) + maxValue = entry; auto sumExponentiated = Value::create(0); - for (size_t i = 0; i < x.size(); ++i) { - auto exponentiated = (x[i] - maxValue)->exp(); + for (auto entry : x) { + auto exponentiated = (entry - maxValue)->exp(); out.emplace_back(exponentiated); sumExponentiated = sumExponentiated + exponentiated; } diff --git a/include/nn/activation/Tanh.hpp b/include/nn/activation/Tanh.hpp index 1bed7ed..ad77677 100644 --- a/include/nn/activation/Tanh.hpp +++ b/include/nn/activation/Tanh.hpp @@ -28,9 +28,8 @@ template Vector 
From e30b15e43b3206b76964d8b78489e561a8b672ce Mon Sep 17 00:00:00 2001
From: "szewczyk.franciszek02"
Date: Wed, 8 Nov 2023 19:27:33 +0100
Subject: [PATCH 4/4] Iterating with references

---
 examples/xor_classification.cpp   | 4 ++--
 include/nn/Neuron.hpp             | 4 ++--
 include/nn/activation/Exp.hpp     | 2 +-
 include/nn/activation/ReLU.hpp    | 2 +-
 include/nn/activation/Sigmoid.hpp | 2 +-
 include/nn/activation/Softmax.hpp | 4 ++--
 include/nn/activation/Tanh.hpp    | 2 +-
 7 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/examples/xor_classification.cpp b/examples/xor_classification.cpp
index e8cd162..30ff5fa 100644
--- a/examples/xor_classification.cpp
+++ b/examples/xor_classification.cpp
@@ -16,7 +16,7 @@ int main() {
     auto mlp = SequentialBuilder<Type::float32>::begin()
                    .add(Linear32::create(2, 15))
                    .add(ReLU32::create())
-                   .add(Dropout32::create(15, 5, 0.3))
+                   .add(Dropout32::create(15, 5, 0.2))
                    .add(Tanh32::create())
                    .add(Linear32::create(5, 2))
                    .add(Softmax32::create())
@@ -27,7 +27,7 @@ int main() {
     Loss::Function32 lossFunction = Loss::CrossEntropy;
 
     // ------ TRAINING THE NETWORK ------- //
-    for (size_t epoch = 0; epoch < 500; epoch++) {
+    for (size_t epoch = 0; epoch < 100; epoch++) {
         auto epochLoss = Val32::create(0);
 
         optimizer.reset();

diff --git a/include/nn/Neuron.hpp b/include/nn/Neuron.hpp
index a4cefa5..6d148de 100644
--- a/include/nn/Neuron.hpp
+++ b/include/nn/Neuron.hpp
@@ -46,8 +46,8 @@ template <typename T> std::vector<ValuePtr<T>> Neuron<T>::parameters() const {
     std::vector<ValuePtr<T>> params;
     params.reserve(_weights.size() + 1);
 
-    for (size_t i = 0; i < _weights.size(); ++i)
-        params.push_back(_weights[i]);
+    for (auto &w : _weights)
+        params.push_back(w);
 
     params.push_back(_bias);
 

diff --git a/include/nn/activation/Exp.hpp b/include/nn/activation/Exp.hpp
index 15521cf..b97bd1d 100644
--- a/include/nn/activation/Exp.hpp
+++ b/include/nn/activation/Exp.hpp
@@ -28,7 +28,7 @@ template <typename T> Vector<T> Exp<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x)
+    for (auto &entry : x)
         out.emplace_back(entry->exp());
 
     return Vector<T>(out);

diff --git a/include/nn/activation/ReLU.hpp b/include/nn/activation/ReLU.hpp
index dfdce01..7c0f32b 100644
--- a/include/nn/activation/ReLU.hpp
+++ b/include/nn/activation/ReLU.hpp
@@ -28,7 +28,7 @@ template <typename T> Vector<T> ReLU<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x)
+    for (auto &entry : x)
         out.emplace_back(entry->relu());
 
     return Vector<T>(out);

diff --git a/include/nn/activation/Sigmoid.hpp b/include/nn/activation/Sigmoid.hpp
index 6679421..cbc4ed7 100644
--- a/include/nn/activation/Sigmoid.hpp
+++ b/include/nn/activation/Sigmoid.hpp
@@ -30,7 +30,7 @@ template <typename T> Vector<T> Sigmoid<T>::operator()(const Vector<T> &x) const
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x) {
+    for (auto &entry : x) {
         out.emplace_back(entry->sigmoid());
     }
 

diff --git a/include/nn/activation/Softmax.hpp b/include/nn/activation/Softmax.hpp
index eb581b7..004a99c 100644
--- a/include/nn/activation/Softmax.hpp
+++ b/include/nn/activation/Softmax.hpp
@@ -31,12 +31,12 @@ template <typename T> Vector<T> Softmax<T>::operator()(const Vector<T> &x) const
     out.reserve(x.size());
 
     auto maxValue = Value<T>::create(x[0]->getValue());
-    for (auto entry : x)
+    for (auto &entry : x)
         if (entry > maxValue)
             maxValue = entry;
 
     auto sumExponentiated = Value<T>::create(0);
-    for (auto entry : x) {
+    for (auto &entry : x) {
         auto exponentiated = (entry - maxValue)->exp();
         out.emplace_back(exponentiated);
         sumExponentiated = sumExponentiated + exponentiated;

diff --git a/include/nn/activation/Tanh.hpp b/include/nn/activation/Tanh.hpp
index ad77677..5066526 100644
--- a/include/nn/activation/Tanh.hpp
+++ b/include/nn/activation/Tanh.hpp
@@ -28,7 +28,7 @@ template <typename T> Vector<T> Tanh<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x)
+    for (auto &entry : x)
         out.emplace_back(entry->tanh());
 
     return Vector<T>(out);
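A closing note on why this last patch switches to iterating with auto &entry. Assuming ValuePtr is a shared-pointer-style handle (which its Value<T>::create factory usage suggests), iterating by value copies the handle and bumps an atomic reference count once per element, while iterating by reference does not. A stand-alone sketch in plain standard C++, not shkyera code:

    #include <iostream>
    #include <memory>
    #include <vector>

    int main() {
        std::vector<std::shared_ptr<int>> values{std::make_shared<int>(1)};

        for (auto v : values)                   // copies the shared_ptr
            std::cout << v.use_count() << '\n'; // prints 2: the stored element plus the copy v

        for (auto &v : values)                  // binds a reference; no refcount traffic
            std::cout << v.use_count() << '\n'; // prints 1
    }

The reference pays off directly for std::vector members such as Neuron::_weights. For Vector itself the gain is limited: ConstIterator::operator*() returns the ValuePtr by value, so auto &entry binds to that returned temporary and one copy per dereference still happens.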