From e30b15e43b3206b76964d8b78489e561a8b672ce Mon Sep 17 00:00:00 2001
From: "szewczyk.franciszek02"
Date: Wed, 8 Nov 2023 19:27:33 +0100
Subject: [PATCH] Iterating with references

---
 examples/xor_classification.cpp   | 4 ++--
 include/nn/Neuron.hpp             | 4 ++--
 include/nn/activation/Exp.hpp     | 2 +-
 include/nn/activation/ReLU.hpp    | 2 +-
 include/nn/activation/Sigmoid.hpp | 2 +-
 include/nn/activation/Softmax.hpp | 4 ++--
 include/nn/activation/Tanh.hpp    | 2 +-
 7 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/examples/xor_classification.cpp b/examples/xor_classification.cpp
index e8cd162..30ff5fa 100644
--- a/examples/xor_classification.cpp
+++ b/examples/xor_classification.cpp
@@ -16,7 +16,7 @@ int main() {
     auto mlp = SequentialBuilder::begin()
                    .add(Linear32::create(2, 15))
                    .add(ReLU32::create())
-                   .add(Dropout32::create(15, 5, 0.3))
+                   .add(Dropout32::create(15, 5, 0.2))
                    .add(Tanh32::create())
                    .add(Linear32::create(5, 2))
                    .add(Softmax32::create())
@@ -27,7 +27,7 @@ int main() {
     Loss::Function32 lossFunction = Loss::CrossEntropy;
 
     // ------ TRAINING THE NETWORK ------- //
-    for (size_t epoch = 0; epoch < 500; epoch++) {
+    for (size_t epoch = 0; epoch < 100; epoch++) {
         auto epochLoss = Val32::create(0);
 
         optimizer.reset();
diff --git a/include/nn/Neuron.hpp b/include/nn/Neuron.hpp
index a4cefa5..6d148de 100644
--- a/include/nn/Neuron.hpp
+++ b/include/nn/Neuron.hpp
@@ -46,8 +46,8 @@ template <typename T> std::vector<ValuePtr<T>> Neuron<T>::parameters() const {
     std::vector<ValuePtr<T>> params;
     params.reserve(_weights.size() + 1);
 
-    for (size_t i = 0; i < _weights.size(); ++i)
-        params.push_back(_weights[i]);
+    for (auto &w : _weights)
+        params.push_back(w);
 
     params.push_back(_bias);
 
diff --git a/include/nn/activation/Exp.hpp b/include/nn/activation/Exp.hpp
index 15521cf..b97bd1d 100644
--- a/include/nn/activation/Exp.hpp
+++ b/include/nn/activation/Exp.hpp
@@ -28,7 +28,7 @@ template <typename T> Vector<T> Exp<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x)
+    for (auto &entry : x)
         out.emplace_back(entry->exp());
 
     return Vector<T>(out);
diff --git a/include/nn/activation/ReLU.hpp b/include/nn/activation/ReLU.hpp
index dfdce01..7c0f32b 100644
--- a/include/nn/activation/ReLU.hpp
+++ b/include/nn/activation/ReLU.hpp
@@ -28,7 +28,7 @@ template <typename T> Vector<T> ReLU<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x)
+    for (auto &entry : x)
         out.emplace_back(entry->relu());
 
     return Vector<T>(out);
diff --git a/include/nn/activation/Sigmoid.hpp b/include/nn/activation/Sigmoid.hpp
index 6679421..cbc4ed7 100644
--- a/include/nn/activation/Sigmoid.hpp
+++ b/include/nn/activation/Sigmoid.hpp
@@ -30,7 +30,7 @@ template <typename T> Vector<T> Sigmoid<T>::operator()(const Vector<T> &x) const
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x) {
+    for (auto &entry : x) {
         out.emplace_back(entry->sigmoid());
     }
 
diff --git a/include/nn/activation/Softmax.hpp b/include/nn/activation/Softmax.hpp
index eb581b7..004a99c 100644
--- a/include/nn/activation/Softmax.hpp
+++ b/include/nn/activation/Softmax.hpp
@@ -31,12 +31,12 @@ template <typename T> Vector<T> Softmax<T>::operator()(const Vector<T> &x) const
     out.reserve(x.size());
 
     auto maxValue = Value<T>::create(x[0]->getValue());
-    for (auto entry : x)
+    for (auto &entry : x)
         if (entry > maxValue)
             maxValue = entry;
 
     auto sumExponentiated = Value<T>::create(0);
-    for (auto entry : x) {
+    for (auto &entry : x) {
         auto exponentiated = (entry - maxValue)->exp();
         out.emplace_back(exponentiated);
         sumExponentiated = sumExponentiated + exponentiated;
diff --git a/include/nn/activation/Tanh.hpp b/include/nn/activation/Tanh.hpp
index ad77677..5066526 100644
--- a/include/nn/activation/Tanh.hpp
+++ b/include/nn/activation/Tanh.hpp
@@ -28,7 +28,7 @@ template <typename T> Vector<T> Tanh<T>::operator()(const Vector<T> &x) const {
     std::vector<ValuePtr<T>> out;
     out.reserve(x.size());
 
-    for (auto entry : x)
+    for (auto &entry : x)
         out.emplace_back(entry->tanh());
 
     return Vector<T>(out);
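
Note on the change: ValuePtr<T> looks like a shared-pointer-style handle (an assumption; the alias itself is not shown in this patch), so `for (auto entry : x)` copies the handle on every iteration, paying an atomic reference-count increment and decrement each time, while `for (auto &entry : x)` binds directly to the stored element and copies nothing. A minimal standalone sketch of the difference, using std::shared_ptr as a stand-in for ValuePtr<T>:

    // sketch.cpp -- illustration only; not part of the patched library.
    #include <iostream>
    #include <memory>
    #include <vector>

    int main() {
        // 1000 handles that all own the same object, mimicking a Vector of ValuePtr.
        std::vector<std::shared_ptr<int>> values(1000, std::make_shared<int>(42));

        // By value: the loop variable is a fresh shared_ptr copy, so each
        // iteration does an atomic increment/decrement of the use count.
        for (auto entry : values) {
            std::cout << "by value:     use_count = " << entry.use_count() << '\n'; // 1001
            break;
        }

        // By reference: no copy is made; the count stays at 1000.
        for (const auto &entry : values) {
            std::cout << "by reference: use_count = " << entry.use_count() << '\n'; // 1000
            break;
        }
    }

The by-value loop observes a use count one higher than the by-reference loop because the loop variable itself is an extra owner for the duration of each iteration; dropping that per-element copy is exactly what the `&` in the patched loops buys.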