Skip to content

Commit

Permalink
Sequential module
Browse files Browse the repository at this point in the history
  • Loading branch information
fszewczyk committed Nov 7, 2023
1 parent 6702a9b commit d3ea8b8
Show file tree
Hide file tree
Showing 15 changed files with 164 additions and 99 deletions.
20 changes: 10 additions & 10 deletions include/ShkyeraTensor.hpp
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
#pragma once

#include "src/Activation.hpp"
#include "src/Layer.hpp"
#include "src/Loss.hpp"
#include "src/MLP.hpp"
#include "src/Module.hpp"
#include "src/Neuron.hpp"
#include "src/Optimizer.hpp"
#include "src/Type.hpp"
#include "src/Value.hpp"
#include "src/Vector.hpp"
#include "src/core/Type.hpp"
#include "src/core/Value.hpp"
#include "src/core/Vector.hpp"
#include "src/nn/Activation.hpp"
#include "src/nn/Layer.hpp"
#include "src/nn/Loss.hpp"
#include "src/nn/Module.hpp"
#include "src/nn/Neuron.hpp"
#include "src/nn/Optimizer.hpp"
#include "src/nn/Sequential.hpp"
56 changes: 0 additions & 56 deletions include/src/MLP.hpp

This file was deleted.

13 changes: 0 additions & 13 deletions include/src/Module.hpp

This file was deleted.

File renamed without changes.
File renamed without changes.
5 changes: 5 additions & 0 deletions include/src/Value.hpp → include/src/core/Value.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,17 @@
#include <unordered_set>
#include <vector>

#include "Type.hpp"

namespace shkyera {

template <typename T> class Optimizer;
template <typename T> class Value;
template <typename T> using ValuePtr = std::shared_ptr<Value<T>>;

using Val32 = Value<Type::float32>;
using Val64 = Value<Type::float64>;

template <typename T> class Value : public std::enable_shared_from_this<Value<T>> {
private:
T _data = 0;
Expand Down
5 changes: 5 additions & 0 deletions include/src/Vector.hpp → include/src/core/Vector.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,15 @@

#include <exception>

#include "Type.hpp"
#include "Value.hpp"

namespace shkyera {

template <typename T> class Vector;
using Vec32 = Vector<Type::float32>;
using Vec64 = Vector<Type::float64>;

template <typename T> class Vector {
private:
std::vector<ValuePtr<T>> _values;
Expand Down
3 changes: 2 additions & 1 deletion include/src/Activation.hpp → include/src/nn/Activation.hpp
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#pragma once

#include "Value.hpp"
#include "../core/Type.hpp"
#include "../core/Value.hpp"

namespace shkyera::Activation {

Expand Down
15 changes: 14 additions & 1 deletion include/src/Layer.hpp → include/src/nn/Layer.hpp
Original file line number Diff line number Diff line change
@@ -1,18 +1,27 @@
#pragma once

#include "../core/Type.hpp"
#include "Activation.hpp"
#include "Module.hpp"
#include "Neuron.hpp"

namespace shkyera {

template <typename T> class Layer;
template <typename T> using LayerPtr = std::shared_ptr<Layer<T>>;

// Convenience aliases for single- and double-precision layers.
using Layer32 = Layer<Type::float32>;
// BUGFIX: previously aliased Type::float32; Layer64 must be double precision,
// matching Val64, Vec64, Neuron64 and Sequential64.
using Layer64 = Layer<Type::float64>;

template <typename T> class Layer : public Module<T> {
private:
std::vector<Neuron<T>> _neurons;

public:
Layer(size_t input, size_t size, Activation::Function<T> activation = Activation::relu<T>);

public:
static LayerPtr<T> create(size_t input, size_t size, Activation::Function<T> activation = Activation::relu<T>);

virtual Vector<T> operator()(const Vector<T> &x) const override;
virtual std::vector<ValuePtr<T>> parameters() const override;
};
Expand All @@ -24,6 +33,10 @@ template <typename T> Layer<T>::Layer(size_t input, size_t size, Activation::Fun
}
}

// Factory returning a shared pointer to a new Layer.
// Layer's constructor is public, so std::make_shared can be used directly:
// one allocation holds both the control block and the Layer object.
template <typename T> LayerPtr<T> Layer<T>::create(size_t input, size_t size, Activation::Function<T> activation) {
    return std::make_shared<Layer<T>>(input, size, activation);
}

template <typename T> Vector<T> Layer<T>::operator()(const Vector<T> &x) const {
std::vector<ValuePtr<T>> output(_neurons.size());

Expand Down
5 changes: 4 additions & 1 deletion include/src/Loss.hpp → include/src/nn/Loss.hpp
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
#pragma once

#include "Value.hpp"
#include "../core/Value.hpp"

namespace shkyera::Loss {

template <typename T> using Function = std::function<ValuePtr<T>(Vector<T> a, Vector<T> b)>;

using Function32 = Function<Type::float32>;
using Function64 = Function<Type::float64>;

template <typename T>
Function<T> MSE = [](Vector<T> a, Vector<T> b) {
if (a.size() != b.size()) {
Expand Down
20 changes: 20 additions & 0 deletions include/src/nn/Module.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#pragma once

#include "../core/Vector.hpp"

namespace shkyera {

template <typename T> class Module;
template <typename T> using ModulePtr = std::shared_ptr<Module<T>>;

/**
 * Common base class for every neural-network building block (Neuron, Layer,
 * Sequential, ...). Defines the interface used to run data through a module
 * and to expose its trainable parameters.
 */
template <typename T> class Module {
  protected:
    // Only derived classes may construct a Module.
    Module() = default;

  public:
    // BUGFIX: Module is a polymorphic base held through ModulePtr; a virtual
    // destructor guarantees derived destructors run when deleted via a base
    // pointer (shared_ptr's stored deleter mitigates this, but raw deletion
    // through Module* would otherwise be undefined behavior).
    virtual ~Module() = default;

    // Convenience alias for operator(): applies the module to `x`.
    Vector<T> forward(const Vector<T> &x) const { return (*this)(x); }
    // Identity transform by default; derived modules override this.
    virtual Vector<T> operator()(const Vector<T> &x) const { return x; }
    // No trainable parameters by default; derived modules override this.
    virtual std::vector<ValuePtr<T>> parameters() const { return {}; }
};

} // namespace shkyera
17 changes: 11 additions & 6 deletions include/src/Neuron.hpp → include/src/nn/Neuron.hpp
Original file line number Diff line number Diff line change
@@ -1,13 +1,18 @@
#pragma once

#include "../core/Type.hpp"
#include "../core/Utils.hpp"
#include "../core/Value.hpp"
#include "../core/Vector.hpp"
#include "Module.hpp"
#include "Utils.hpp"
#include "Value.hpp"
#include "Vector.hpp"

namespace shkyera {

template <typename T> class Neuron : public Module<T> {
template <typename T> class Neuron;
using Neuron32 = Neuron<Type::float32>;
using Neuron64 = Neuron<Type::float64>;

template <typename T> class Neuron {
private:
ValuePtr<T> _bias;
Vector<T> _weights;
Expand All @@ -17,8 +22,8 @@ template <typename T> class Neuron : public Module<T> {
Neuron(size_t input);
Neuron(size_t input, std::function<ValuePtr<T>(ValuePtr<T>)> activation);

virtual Vector<T> operator()(const Vector<T> &x) const override;
virtual std::vector<ValuePtr<T>> parameters() const override;
Vector<T> operator()(const Vector<T> &x) const;
std::vector<ValuePtr<T>> parameters() const;
};

template <typename T> Neuron<T>::Neuron(size_t input) {
Expand Down
6 changes: 5 additions & 1 deletion include/src/Optimizer.hpp → include/src/nn/Optimizer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,15 @@

#include <vector>

#include "../core/Type.hpp"
#include "../core/Value.hpp"
#include "Module.hpp"
#include "Value.hpp"

namespace shkyera {

// Convenience aliases for single- and double-precision optimizers.
using Optimizer32 = Optimizer<Type::float32>;
// BUGFIX: previously aliased Type::float32; Optimizer64 must be double
// precision, matching Val64, Vec64 and the other *64 aliases.
using Optimizer64 = Optimizer<Type::float64>;

template <typename T> class Optimizer {
private:
std::vector<ValuePtr<T>> _parameters;
Expand Down
76 changes: 76 additions & 0 deletions include/src/nn/Sequential.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
#pragma once

#include "../core/Type.hpp"
#include "Activation.hpp"
#include "Module.hpp"

namespace shkyera {

// Forward declarations so the pointer alias and precision aliases below can
// name the templates before their definitions.
template <typename T> class Sequential;
template <typename T> class SequentialBuilder;
template <typename T> using SequentialPtr = std::shared_ptr<Sequential<T>>;

// Convenience aliases for single- and double-precision networks and builders.
using Sequential32 = Sequential<Type::float32>;
using Sequential64 = Sequential<Type::float64>;
using SequentialBuilder32 = SequentialBuilder<Type::float32>;
using SequentialBuilder64 = SequentialBuilder<Type::float64>;

// A Module that chains sub-modules: each layer's output is fed as the input
// of the next layer.
template <typename T> class Sequential : public Module<T> {
  private:
    // Sub-modules applied in order.
    std::vector<ModulePtr<T>> _layers;

    // Private: instances are created through create() so they always live in
    // a shared_ptr (SequentialPtr).
    Sequential(const std::vector<ModulePtr<T>> &layers);

  public:
    // Factory returning a shared pointer to a new Sequential over `layers`.
    static SequentialPtr<T> create(const std::vector<ModulePtr<T>> &layers);

    // Runs `x` through every layer in order and returns the final output.
    virtual Vector<T> operator()(const Vector<T> &x) const override;
    // Collects the trainable parameters of all layers into one vector.
    virtual std::vector<ValuePtr<T>> parameters() const override;
};

// Fluent helper for assembling a Sequential:
//   SequentialBuilder<T>::begin().add(layerA).add(layerB).build();
template <typename T> class SequentialBuilder {
  private:
    // Layers registered so far, in the order they were added.
    std::vector<ModulePtr<T>> _layers;

    // Private: chains start with begin().
    SequentialBuilder() = default;

  public:
    // Starts a new, empty builder chain.
    static SequentialBuilder<T> begin();

    // Appends a layer; returns the builder (by value) so calls can be chained.
    SequentialBuilder<T> add(ModulePtr<T> layer);
    // Finalizes the chain into a Sequential holding all registered layers.
    SequentialPtr<T> build();
};

// Stores the given layers; reachable only through create().
template <typename T> Sequential<T>::Sequential(const std::vector<ModulePtr<T>> &layers) : _layers(layers) {}

// Factory for Sequential. The constructor is private, so std::make_shared
// cannot be used here; construct with `new` and hand ownership to shared_ptr.
template <typename T> SequentialPtr<T> Sequential<T>::create(const std::vector<ModulePtr<T>> &layers) {
    return std::shared_ptr<Sequential<T>>(new Sequential<T>(layers));
}

// Runs `x` through every layer in order and returns the final output.
// BUGFIX: the previous version dereferenced _layers[0] unconditionally, which
// is undefined behavior for a Sequential built with no layers; an empty
// Sequential now acts as the identity, matching Module's default operator().
// The std::for_each call (whose header, <algorithm>, was never included) is
// replaced by a plain range-for.
template <typename T> Vector<T> Sequential<T>::operator()(const Vector<T> &x) const {
    if (_layers.empty())
        return x;

    Vector<T> out = (*_layers.front())(x);
    for (size_t i = 1; i < _layers.size(); ++i)
        out = _layers[i]->forward(out);

    return out;
}

// Gathers the trainable parameters of every layer into one flat vector.
template <typename T> std::vector<ValuePtr<T>> Sequential<T>::parameters() const {
    std::vector<ValuePtr<T>> collected;

    for (size_t i = 0; i < _layers.size(); ++i) {
        const std::vector<ValuePtr<T>> layerParams = _layers[i]->parameters();
        collected.insert(collected.end(), layerParams.begin(), layerParams.end());
    }

    return collected;
}

// Starts a new chain with no layers registered.
template <typename T> SequentialBuilder<T> SequentialBuilder<T>::begin() {
    SequentialBuilder<T> builder;
    return builder;
}

// Registers one more layer and returns a copy of the builder so that
// add() calls can be chained fluently.
template <typename T> SequentialBuilder<T> SequentialBuilder<T>::add(ModulePtr<T> layer) {
    _layers.push_back(layer);
    return *this;
}

// Finalizes the chain into a Sequential holding all registered layers.
template <typename T> SequentialPtr<T> SequentialBuilder<T>::build() {
    return Sequential<T>::create(_layers);
}

} // namespace shkyera
22 changes: 12 additions & 10 deletions tests/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,27 +3,29 @@
int main() {
using namespace shkyera;

using T = Type::float32;
std::vector<Vec32> xs = {Vec32::of({0, 0}), Vec32::of({1, 0}), Vec32::of({0, 1}), Vec32::of({1, 1})};
std::vector<Vec32> ys = {Vec32::of({0}), Vec32::of({1}), Vec32::of({1}), Vec32::of({0})};

std::vector<Vector<T>> xs = {Vector<T>::of({0, 0}), Vector<T>::of({1, 0}), Vector<T>::of({0, 1}),
Vector<T>::of({1, 1})};
std::vector<Vector<T>> ys = {Vector<T>::of({0}), Vector<T>::of({1}), Vector<T>::of({1}), Vector<T>::of({0})};
// clang-format off
auto mlp = SequentialBuilder<Type::float32>::begin()
.add(Layer32::create(2, 15, Activation::relu<Type::float32>))
.add(Layer32::create(15, 5, Activation::relu<Type::float32>))
.add(Layer32::create(5, 1, Activation::sigmoid<Type::float32>))
.build();
// clang-format on

auto mlp = MLP<T>(2, {5, 5, 1}, {Activation::relu<T>, Activation::relu<T>, Activation::sigmoid<T>});
auto optimizer = Optimizer<T>(mlp.parameters(), 0.1);
auto lossFunction = Loss::MSE<T>;
auto optimizer = Optimizer32(mlp->parameters(), 0.1);
auto lossFunction = Loss::MSE<Type::float32>;

for (size_t epoch = 0; epoch < 1000; epoch++) {
optimizer.resetGradient();

for (size_t sample = 0; sample < xs.size(); ++sample) {

auto pred = mlp(xs[sample]);
auto pred = mlp->forward(xs[sample]);
auto loss = lossFunction(pred, ys[sample]);

std::cerr << loss << '\n';
}

optimizer.stepGradient();
}
}

0 comments on commit d3ea8b8

Please sign in to comment.