NeuralNetwork.h
#ifndef NEURALNETWORK_H
#define NEURALNETWORK_H

#include <EasySerDes.h>
#include <fmt/format.h>

#include <cstddef>
#include <functional>
#include <memory>
#include <vector>
/**
 * A basic NeuralNetwork with no backward propagation. The sigmoid function
 * operates between 0.0 and 1.0.
 */
class NeuralNetwork {
public:
    // I don't usually like typedefs, but here we can collapse a complex 3D
    // array into manageable types without creating a bunch of structs
    using InputWeight = double;
    using Node = std::vector<InputWeight>;
    using Layer = std::vector<Node>;
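    // Reading the nesting above: a std::vector<Layer> is indexed as
    // layers[layer][node][input] and yields an InputWeight, which is the
    // 3D array the previous comment refers to.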
    enum class InitialWeights : bool {
        Random,
        PassThrough,
    };

    // FIXME work out how to not have this hard coded
    static constexpr unsigned BRAIN_WIDTH = 7;
    /**
     * Creates a rectangular network of the specified width and height, with
     * edge weights initialised according to initialWeights; random weights
     * fall between 0.0 and 1.0.
     */
    NeuralNetwork(unsigned layerCount, std::size_t width, InitialWeights initialWeights);
    NeuralNetwork(std::vector<Layer>&& layers, std::size_t width);

    size_t GetInputCount() const { return layers_.empty() ? 0 : layers_.front().size(); }
    size_t GetOutputCount() const { return layers_.empty() ? 0 : layers_.back().size(); }
    size_t GetConnectionCount() const;
    /**
     * Inputs should be between 0.0 and 1.0 inclusive. The final node values
     * are written back into the supplied vector.
     */
    void ForwardPropogate(std::vector<double>& inputs) const;

    void ForEach(const std::function<void(unsigned, unsigned, const Node&)>& perNode) const;
    size_t GetLayerWidth() const { return width_; }
    size_t GetLayerCount() const { return layers_.size(); }
    const std::vector<Layer>& GetLayers() const { return layers_; }

    std::shared_ptr<NeuralNetwork> WithMutatedConnections() const;
    std::shared_ptr<NeuralNetwork> WithColumnAdded(size_t index, InitialWeights connections) const;
    std::shared_ptr<NeuralNetwork> WithColumnRemoved(size_t index) const;
    std::shared_ptr<NeuralNetwork> WithRowAdded(size_t index, InitialWeights connections) const;
    std::shared_ptr<NeuralNetwork> WithRowRemoved(size_t index) const;
private:
    // Scratch storage, presumably reused by ForwardPropogate to avoid
    // re-allocating a buffer on every call.
    static inline std::vector<double> previousNodeValues_;

    std::vector<Layer> layers_;
    size_t width_;

    static std::vector<Layer> CreateRandomLayers(size_t layerCount, size_t width);
    static Layer CreateRandomLayer(size_t width);
    static std::vector<Layer> CreatePassThroughLayers(size_t layerCount, size_t width);
    static Layer CreatePassThroughLayer(size_t width);

    std::vector<Layer> CopyLayers() const;
};
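
// Example usage (a minimal sketch; the layer count and input values are
// illustrative, not taken from the project):
//
//   NeuralNetwork net(3, NeuralNetwork::BRAIN_WIDTH, NeuralNetwork::InitialWeights::Random);
//   std::vector<double> values(net.GetInputCount(), 0.5); // inputs in [0.0, 1.0]
//   net.ForwardPropogate(values); // values now holds the output node values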
template<>
struct fmt::formatter<NeuralNetwork>
{
    template<typename ParseContext>
    constexpr auto parse(ParseContext& context)
    {
        return context.begin();
    }

    template <typename FormatContext>
    auto format(const NeuralNetwork& network, FormatContext& context)
    {
        return fmt::format_to(context.out(), "{} inputs, {} layers", network.GetInputCount(), network.GetLayerCount());
    }
};
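
// With the formatter above a network can be passed straight to fmt (the
// output shown is illustrative):
//
//   NeuralNetwork net(2, NeuralNetwork::BRAIN_WIDTH, NeuralNetwork::InitialWeights::Random);
//   fmt::format("{}", net); // "7 inputs, 2 layers"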
template<>
class esd::Serialiser<NeuralNetwork> : public esd::ClassHelper<NeuralNetwork, std::vector<NeuralNetwork::Layer>, size_t> {
public:
    static void Configure()
    {
        SetConstruction(
            CreateParameter(&NeuralNetwork::GetLayers, "Layers"),
            CreateParameter(&NeuralNetwork::GetLayerWidth, "Width")
        );
    }
};
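
// Round-trip sketch for the Serialiser above. This assumes EasySerDes
// exposes esd::Serialise and esd::DeserialiseWithoutExceptions entry points;
// check the EasySerDes version in use before relying on these names:
//
//   NeuralNetwork net(2, NeuralNetwork::BRAIN_WIDTH, NeuralNetwork::InitialWeights::Random);
//   auto json = esd::Serialise(net);
//   std::optional<NeuralNetwork> copy = esd::DeserialiseWithoutExceptions<NeuralNetwork>(json);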
#endif // NEURALNETWORK_H