From ef1810c336fd00cf926e783e5b1b0d0cfe9e79fc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Radek=20Pu=C5=A1?= <pusradek@fit.cvut.cz>
Date: Fri, 12 Jul 2019 20:11:46 +0200
Subject: [PATCH] UNCOMPILABLE CODE - backward propagation

---
 NeuronNetwork/NeuronNetwork/Corrector.cpp     |  89 ++++++++++++++++++
 NeuronNetwork/NeuronNetwork/Corrector.h       |  57 +++++++++++
 NeuronNetwork/NeuronNetwork/Define.h          |   4 +
 NeuronNetwork/NeuronNetwork/Layer.cpp         |  48 +++++++++-
 NeuronNetwork/NeuronNetwork/Layer.h           |  11 ++-
 NeuronNetwork/NeuronNetwork/Network.cpp       |  61 ++++++++++--
 NeuronNetwork/NeuronNetwork/Network.h         |  25 +++--
 NeuronNetwork/NeuronNetwork/Neuron.cpp        |  76 ++++++++++++---
 NeuronNetwork/NeuronNetwork/Neuron.h          |  33 ++++---
 NeuronNetwork/NeuronNetwork/NeuronNetwork.aps | Bin 0 -> 1464 bytes
 NeuronNetwork/NeuronNetwork/NeuronNetwork.cpp |  39 +++++++-
 NeuronNetwork/NeuronNetwork/NeuronNetwork.rc  | Bin 0 -> 2676 bytes
 .../NeuronNetwork/NeuronNetwork.vcxproj       |   5 +
 .../NeuronNetwork.vcxproj.filters             |  44 +++++++--
 .../NeuronNetwork/TemplateFunctions.cpp       |  33 +++++++
 .../NeuronNetwork/TemplateFunctions.h         |  20 ++++
 NeuronNetwork/NeuronNetwork/resource.h        |  14 +++
 17 files changed, 497 insertions(+), 62 deletions(-)
 create mode 100644 NeuronNetwork/NeuronNetwork/Corrector.cpp
 create mode 100644 NeuronNetwork/NeuronNetwork/Corrector.h
 create mode 100644 NeuronNetwork/NeuronNetwork/Define.h
 create mode 100644 NeuronNetwork/NeuronNetwork/NeuronNetwork.aps
 create mode 100644 NeuronNetwork/NeuronNetwork/NeuronNetwork.rc
 create mode 100644 NeuronNetwork/NeuronNetwork/TemplateFunctions.cpp
 create mode 100644 NeuronNetwork/NeuronNetwork/TemplateFunctions.h
 create mode 100644 NeuronNetwork/NeuronNetwork/resource.h

diff --git a/NeuronNetwork/NeuronNetwork/Corrector.cpp b/NeuronNetwork/NeuronNetwork/Corrector.cpp
new file mode 100644
index 0000000..e98d491
--- /dev/null
+++ b/NeuronNetwork/NeuronNetwork/Corrector.cpp
@@ -0,0 +1,89 @@
+#include "Corrector.h"
+
+Corrector::Corrector(const vector<Neuron*>& neurons, const double sumOfNeuronWeights, const int layerHeight)
+	:sumOfNeuronWeights(sumOfNeuronWeights), layerHeight(layerHeight)
+{
+	SetWeights(neurons);
+}
+
+Corrector::~Corrector()
+{
+	delete weightMatrix;
+}
+
+// E_x = W_x / Sum(W_i) * E_x
+// TODO: the sum doesn't have to be computed
+double Corrector::GetError(const RowVectorXd& weights, const VectorXd& errors, const int length) {
+
+	double sum = 0;
+	for (int i = 0; i < weights.rows(); i++)
+		sum += weights[i];
+
+	double WeightErrorSum = (weights * errors)[0];
+
+	return WeightErrorSum / sum;
+}
+
+//TODO: store weights in VectorXd instead of RowVectorXd
+double Corrector::SetWeights(const vector<Neuron*>& neurons)
+{
+	if (weightMatrix != NULL)
+		delete weightMatrix;
+
+	weightMatrix = new MatrixXd(neurons.size(), layerHeight);
+	for (int i = 0; i < neurons.size(); i++) {
+		(*weightMatrix).row(i) = neurons[i]->GetWeights();
+	}
+	weightMatrix->transposeInPlace();
+}
+
+
+/*const double Repair() const {
+		//
+		// chain rule
+		//
+		// d(E_total)     d(E_total)    d( f(out) )     d(out)
+		// ---------  ==  ----------- * ------------- * -------
+		// d(w)		      d( f(out) )   d(out)          d(w)
+
+		//------------------------------------------------------
+		//  => RET    ==       X      *    Y          *   Z
+		//------------------------------------------------------
+
+		const double X = target - output;
+
+		if (abs(X) < 0.001)
+			return 0;
+
+		const double Y = Derive(output, DEFAULT_ACTIVATION_FUNCTION);
+		const double Z = input;
+		//const double Z = Activate(input, DEFAULT_ACTIVATION_FUNCTION);
+
+		const double RET = X * Y * Z;
+
+		if (abs(RET) < 0.001)
+			return 0;
+
+		return DEFAULT_LEARNING_RATE * (-RET);
+	}
+
+
+	RowVectorXd RepairAll(const RowVectorXd& originalWeights, const VectorXd& inputs, const double target) {
+
+		RowVectorXd delta(originalWeights.size());
+		this->target = target;
+
+		for (int i = 0; i < inputs.size(); i++) {
+			input = inputs[i];
+			output = inputs[i] * originalWeights[i];
+
+			if (input > 1)
+				input = Activate(input, DEFAULT_ACTIVATION_FUNCTION);
+			if (output > 1)
+				output = Activate(output, DEFAULT_ACTIVATION_FUNCTION);
+
+			delta[i] = Repair();
+		}
+
+		return delta;
+	}*/
\ No newline at end of file
diff --git a/NeuronNetwork/NeuronNetwork/Corrector.h b/NeuronNetwork/NeuronNetwork/Corrector.h
new file mode 100644
index 0000000..b4b7b4f
--- /dev/null
+++ b/NeuronNetwork/NeuronNetwork/Corrector.h
@@ -0,0 +1,57 @@
+#pragma once
+#include <Eigen/Dense>
+#include "TemplateFunctions.h"
+#include "Define.h"
+#include "Neuron.h"
+
+using namespace Eigen;
+
+class Corrector
+{
+public:
+
+	Corrector(const vector<Neuron*>& neurons, const double sumOfNeuronWeights, const int layerHeight);
+	~Corrector();
+
+	double GetError(const RowVectorXd& weights, const VectorXd& errors, const int length);
+	double SetWeights(const vector<Neuron*>& neurons);
+
+	// previous layer neurons (rows) X actual neurons (columns)
+	MatrixXd* weightMatrix;
+	
+	
+	// output and input can be calculated first -> less memory usage
+	// not really: computed "2*3 = 6" vs stored "2 + 3 = 5"
+
+	vector<MatrixXd*> errors; //2 x 3 !!!
+	//x
+	vector<VectorXd*> output; // 2 x 1
+	//x
+	vector<RowVectorXd*> input; // 1 x 3
+	
+	
+	
+	
+	
+	
+
+private:
+	double sumOfNeuronWeights = -1;
+	const int layerHeight;
+	/*Corrector() {};
+
+	/// <summary>
+	/// </summary>
+	/// <param name="input">  connected neuron output </param>
+	/// <param name="output"> squashed output value </param>
+	/// <param name="target"> targeted value </param>
+	Corrector(const double input, const double output, const double target) : input(input), output(output), target(target), error(target-output) {};
+
+	//for each weight
+	double input = -1;
+	double output = -1;//squashed
+	double target = -1;
+
+	double error = -1;*/
+};
+
diff --git a/NeuronNetwork/NeuronNetwork/Define.h b/NeuronNetwork/NeuronNetwork/Define.h
new file mode 100644
index 0000000..eadc29a
--- /dev/null
+++ b/NeuronNetwork/NeuronNetwork/Define.h
@@ -0,0 +1,4 @@
+#pragma once
+#define DEFAULT_ACTIVATION_FUNCTION Sigmoid
+#define DEFAULT_BIAS 0.5
+#define DEFAULT_LEARNING_RATE 0.5 //TODO schedule
diff --git a/NeuronNetwork/NeuronNetwork/Layer.cpp b/NeuronNetwork/NeuronNetwork/Layer.cpp
index 75f8f1e..b48c942 100644
--- a/NeuronNetwork/NeuronNetwork/Layer.cpp
+++ b/NeuronNetwork/NeuronNetwork/Layer.cpp
@@ -1,33 +1,75 @@
 #include "Layer.h"
 
+
+//TODO: remove (multiple for cycles)
+const double getSumOfWeightsFromNeurons(const vector<Neuron*> &neurons) {
+
+	double sum = 0;
+	for (auto& n : neurons)
+		sum += n->sumOfWeights;
+
+	return sum;
+}
+
 Layer::Layer(const int size, const Layer* prevLayer)
 	:prevLayer(prevLayer)
 {
 	neurons.reserve(size);
 	for (int i = 0; i < size; i++)
 		neurons.push_back(new Neuron(prevLayer == NULL ? 0 : prevLayer->Size()));
+
+	const double sumOfNeuronWeights = getSumOfWeightsFromNeurons(neurons);
+	corrector = new Corrector(neurons,sumOfNeuronWeights);
+}
+
+Layer::~Layer()
+{
+	for (auto& neuron : neurons)
+		delete neuron;
 }
 
+/// <summary>
+/// get all activated values from neurons in this layer
+/// </summary>
+/// <returns>Eigen vector of activate values</returns>
 Eigen::VectorXd* Layer::CreateValueVector() const
 {
-	Eigen::VectorXd* vec = new Eigen::VectorXd(neurons.size());
+	Eigen::VectorXd* vec = new Eigen::VectorXd(neurons.size()+1);
 
 	for (int i = 0; i < neurons.size(); i++) {
 		(*vec)[i] = neurons[i]->GetActivatedValue();
 	}
+	(*vec)[neurons.size()] = DEFAULT_BIAS;
 	return vec;
 }
 
+/// <summary>
+/// insert Eigen vector of regular values (for learning) inside all neurons in this layer
+/// </summary>
+/// <param name="values">eigen vector of regular values</param>
 void Layer::SetNeuronValues(const VectorXd& values)
 {
 	for (int i = 0; i < neurons.size(); i++)
 		neurons[i]->SetValues(values);
 }
 
-void Layer::SetRandomInitialValues()
+void Layer::AddTargetValue(const double value)
+{
+	for (int i = 0; i < neurons.size() - 1; i++)
+		neurons[i]->AddTargetValue(value);
+}
+
+void Layer::BackPropagate()
 {
-	srand(time(NULL));
+	for (int i = 0; i < neurons.size() - 1; i++)
+		neurons[i]->BackwardPropagate();
+}
 
+/// <summary>
+/// init layer with random regular values (from 0 to 9)
+/// </summary>
+void Layer::SetRandomInitialValues()
+{
 	for (auto& neuron : neurons) {
 		neuron->SetInitValue(rand() % 10);
 	}
diff --git a/NeuronNetwork/NeuronNetwork/Layer.h b/NeuronNetwork/NeuronNetwork/Layer.h
index 7999bc0..aa9c119 100644
--- a/NeuronNetwork/NeuronNetwork/Layer.h
+++ b/NeuronNetwork/NeuronNetwork/Layer.h
@@ -1,18 +1,23 @@
-#include "Neuron.h"
+#pragma once
 #include <time.h>
 #include <Eigen/Dense>
-#pragma once
+#include "Neuron.h"
+#include "Define.h"
+#include "Corrector.h"
 
 class Layer
 {
 public:
 	Layer(const int size, const Layer* prevLayer);
+	virtual ~Layer();
 
 	Eigen::VectorXd* CreateValueVector() const;
 
 	Neuron& operator[] (const int index) { return *neurons[index]; }
 	const size_t Size() const { return neurons.size(); }
 	void SetNeuronValues(const VectorXd& values);
+	void AddTargetValue(const double value);
+	void BackPropagate();
 
 	void SetRandomInitialValues();
 
@@ -20,6 +25,6 @@ private:
 	vector<Neuron*> neurons;
 	const Layer* prevLayer;
 
-
+	Corrector *corrector;
 };
 
diff --git a/NeuronNetwork/NeuronNetwork/Network.cpp b/NeuronNetwork/NeuronNetwork/Network.cpp
index fa11722..4aed77f 100644
--- a/NeuronNetwork/NeuronNetwork/Network.cpp
+++ b/NeuronNetwork/NeuronNetwork/Network.cpp
@@ -5,6 +5,11 @@ Network::Network(const double learningRate)
 {
 }
 
+/// <summary>
+/// create whole network including layers and neurons
+/// </summary>
+/// <param name="width">network width (including input & output layers)</param>
+/// <param name="form">how many neurons each layer has</param>
 void Network::CreateNetwork(const int width, const int* form)
 {
 	network.reserve(width);
@@ -16,33 +21,71 @@ void Network::CreateNetwork(const int width, const int* form)
 	return;
 }
 
-void Network::LoadNetwork(const int width, const int* form, const VectorXd& values)
+void Network::AddTargetValue(const double value)
 {
-	CreateNetwork(width, form);
-	for (int i = 0; i < width; i++)
+	for (int i = 1; i < network.size(); i++)
+		network[i]->AddTargetValue(value);
+}
+
+void Network::BackPropagate()
+{
+	for (int i = 1; i < network.size(); i++)
+		network[i]->BackPropagate();
+}
+
+/// <summary>
+/// Load saved data to the neurons of the network
+/// </summary>
+/// <param name="values">data to load</param>
+void Network::LoadNetwork(VectorXd& values)
+{
+	for (int i = 0; i < network.size(); i++)
 		network[i]->SetNeuronValues(values);
 }
 
-void PrintNeuronsToConsole(Layer& layer) {
+/// <summary>
+/// print actual state of neurons to the console
+/// </summary>
+/// <param name="layer"></param>
+void Network::PrintLayerToConsole(Layer& layer) {
+
+	cout << "==================================================\n";
+	cout << "Activated values:\n";
+	cout << "==================================================\n";
+
 	for (int j = 0; j < layer.Size(); j++)
 		cout << layer[j].GetActivatedValue() << ' ';
 	cout << endl;
+
+	cout << "==================================================\n";
+	cout << "weights:\n";
+	cout << "==================================================\n";
+
+	for (int j = 0; j < layer.Size(); j++) {
+		RowVectorXd weights = layer[j].GetWeights();
+		
+		cout << "neuron " << j << ": ";
+		for (int k = 0; k < weights.size(); k++)
+			cout << round(weights[k]*1000)/1000.0 << '\t';
+		cout << '\n';
+		//cout << "neuron " << j << ":" << layer[j].GetWeights() << '\n';
+	}
+	cout << endl;
 }
 
 void Network::FeedForward()
 {
-	cout << '\n';
 	network[0]->SetRandomInitialValues();
 
-	for (int i = 0; i < network.size()-1; i++)
+	for (int i = 0; i < network.size() - 1; i++)
 	{
 		Eigen::VectorXd* A = network[i]->CreateValueVector();
-		network[i+1]->SetNeuronValues(*A);
+		network[i + 1]->SetNeuronValues(*A);
 		delete A;
 
-		PrintNeuronsToConsole(*network[i]);
+		//PrintNeuronsToConsole(*network[i]);
 	}
-	PrintNeuronsToConsole(*network.back());
+	//PrintNeuronsToConsole(*network.back());
 }
 
 Network::~Network()
diff --git a/NeuronNetwork/NeuronNetwork/Network.h b/NeuronNetwork/NeuronNetwork/Network.h
index 46e3de2..8ddaa90 100644
--- a/NeuronNetwork/NeuronNetwork/Network.h
+++ b/NeuronNetwork/NeuronNetwork/Network.h
@@ -1,9 +1,11 @@
-#include "Neuron.h"
-#include "Layer.h"
+#pragma once
 #include <iostream>
 #include <vector>
 #include <Eigen/Dense>
-#pragma once
+#include <cmath>
+#include "Neuron.h"
+#include "Layer.h"
+
 
 using namespace std;
 
@@ -15,16 +17,23 @@ public:
 
 	void CreateNetwork(const int width, const int* form);
 
+	void AddTargetValue(const double value);
+	void BackPropagate();
+	void FeedForward();
+
 	//============= TODO: conversion array -> VectorXd =================
-	void LoadNetwork(const int width, const int* form, const VectorXd& values);
+	void LoadNetwork(VectorXd& values);
 
-	//================== DEBUG =================
-	void FeedForward();
-	//================== !DEBUG ================
+	void PrintNet() {
+		for (int i = 1; i < network.size(); i++)
+			PrintLayerToConsole(*(network[i]));
+	};
 
 private:
 	const double learningRate;
 	vector<Layer*> network;
 
-	
+	void PrintLayerToConsole(Layer& layer);
+
+
 };
\ No newline at end of file
diff --git a/NeuronNetwork/NeuronNetwork/Neuron.cpp b/NeuronNetwork/NeuronNetwork/Neuron.cpp
index dd37e80..aaae0a5 100644
--- a/NeuronNetwork/NeuronNetwork/Neuron.cpp
+++ b/NeuronNetwork/NeuronNetwork/Neuron.cpp
@@ -1,31 +1,79 @@
 #include "Neuron.h"
-
+#include <iostream>
+#include "Corrector.h"
 
 Neuron::Neuron(const size_t layerHeight)
+	:layerHeight(layerHeight)
 {
-	weights = RowVectorXd(layerHeight);
+	if (layerHeight < 1)
+		return;
 
-	for (int i = 0; i < layerHeight; i++)
-		weights[i] = DEFAULT_WEIGHT;
-}
+	//bias -> +1
+	weights = RowVectorXd(layerHeight + 1);
 
+	for (int i = 0; i <= layerHeight; i++) {
+		double random = ((rand() % 199) - 100) / 100.0;
+		//cout << "neuron weight: " << random << endl;
+		weights[i] = random;
+		if (weights[i] == 0) {
+			i--; //no null in generated weights
+			continue;
+		}
+
+		sumOfWeights += random;
+	}
+}
 
-void Neuron::Activate(const double value)
+/// <summary>
+/// Set computed activated and derivated value based on actual weights
+/// </summary>
+/// <param name="values"></param>
+/// <returns></returns>
+const double Neuron::SetValues(const VectorXd& values)
 {
-	activatedValue = value / abs(1 + (value));
+	value = weights * values;
+	activatedValue = Activate(value, DEFAULT_ACTIVATION_FUNCTION);
+
+	//data backup
+	this->inputs.push_back(values);
+
+	return value;
 }
 
-void Neuron::Derive(const double value)
+void Neuron::AddTargetValue(const double target)
 {
-	derivatedValue = activatedValue * (1 - activatedValue);
+	errors.push_back(target-activatedValue);
+	errorSum += target - activatedValue;
+
+	/*if (IsActive() != target > 0) {
+		targets.push_back(target);
+		return;
+	}
+	
+	inputs.pop_back();*/
 }
 
-const double Neuron::SetValues(const VectorXd & values)
+void Neuron::BackwardPropagate()
 {
-	this->value = weights * values;
+	//Corrector corrector;
+	//errorSum /= errors.size();
 
-	Activate(value);
-	Derive(value);
+	/*
+	for each weight: error[]=weights[]*errors[]/weightsSum
+	*/
 
-	return value;
+
+	/*Corrector corrector;
+
+	// Get average of all training examples
+	RowVectorXd weightAverage = corrector.RepairAll(weights, inputs[0], targets[0]);
+	for (int i = 1; i < inputs.size(); i++) {
+		weightAverage += corrector.RepairAll(weights, inputs[i], targets[i]);
+	}
+	weights += weightAverage / inputs.size();
+
+	inputs.clear();
+	targets.clear();
+	targetSum = 0;*/
 }
+
diff --git a/NeuronNetwork/NeuronNetwork/Neuron.h b/NeuronNetwork/NeuronNetwork/Neuron.h
index 278038d..e9f213a 100644
--- a/NeuronNetwork/NeuronNetwork/Neuron.h
+++ b/NeuronNetwork/NeuronNetwork/Neuron.h
@@ -2,9 +2,9 @@
 #include <cmath>
 #include <vector>
 #include <Eigen/Dense>
-
-#define DEFAULT_WEIGHT 0.5
-#define DEFAULT_BIAS 2
+#include <time.h>
+#include "TemplateFunctions.h"
+#include "Define.h"
 
 class Layer;
 
@@ -15,7 +15,7 @@ class Neuron
 {
 public:
 	Neuron(const size_t layerHeight);
-	Neuron(const RowVectorXd& weights, const double bias) : weights(weights), bias(bias) {};
+	//Neuron(const RowVectorXd& weights) : weights(weights) {};
 
 	const double SetValues(const VectorXd& values);
 
@@ -24,20 +24,29 @@ public:
 	const RowVectorXd& GetWeights() const { return weights; }
 	const double GetActivatedValue() const { return activatedValue; }
 	void SetInitValue(const double value) { activatedValue = value; }
+	const bool IsActive() { return activatedValue > 0; }
+	void AddTargetValue(const double target);
+	void BackwardPropagate();
 
-private:
+	//=====================================
+
+	double sumOfWeights = 0;
 
-	// Fast Sigmoid Function
-	// f(x)  = x / ( 1 + |x| )
-	// f'(x) = f(x)*(1 - f(x))
-	void Activate(const double value);
-	void Derive(const double value);
+	//=========================================
 
+private:
 	double value = -1;
 	double activatedValue = -1;
-	double derivatedValue = -1;
 	const Layer* prevLayer = NULL;
 	RowVectorXd weights;
-	double bias = DEFAULT_BIAS;
+
+	double sumOfWeights=-1;
+
+	//learning data
+	vector<VectorXd> inputs;
+	double targetSum = 0;
+	vector<double> errors;
+	double errorSum = 0;
+	
 };
 
diff --git a/NeuronNetwork/NeuronNetwork/NeuronNetwork.aps b/NeuronNetwork/NeuronNetwork/NeuronNetwork.aps
new file mode 100644
index 0000000000000000000000000000000000000000..f666a890809eac57e0911157152c8e9b2c7521c6
GIT binary patch
literal 1464
zcmb7EO>fgc5PgC4z}JNXXSejwN;HaTRZwrK?FcK09VJd%BumD1Es`d+D?3)`|KP|E
z0)7P*KP0^MN1PA{Vy&@vXWpBgotZTNNR;_J6HZsgkgHwFJDlT`-}$_f+qbIu1Vhf0
z6MGQ*QEa!OyW43P)NM8Nk{dsm#<t;4PklFtyHR-UdsJMyV|U_4W0$5R3IhUWVKm;f
zszn~2aKj^t1q~N`>oB08jR90@%Gl~CV~hMaOd`*(@gOh$$$vluu*`os3cN{j<x4xW
zQ9x(4;m)(i#5|;Ijzu6=eF@?{b*uUOy1-?-1KX))8%wb>=}x2DcMR>qa(Y_d9P|{;
zIcu~B8sxf+N6U7_>~tE2nwyX6bpp6`=|C|Jt7o<seA=1Y(%ygH(F(fo{<Ms#$O0;;
zob{bEt=HF0!_keV2^r5((N)FiTMbKBuy-Ku;{cWPJmUS6O_q-jJke?%zNHqC`yvnX
zd~8(}oWN$CWB7>pw$bAE4!4*hM8JyK?12y-65J3IP%lsh=Tr8r50|<aU9v)4bM57b
zOIpU12_;Hr87oPVLGEEjmauNNMf4jJDYzczHHh2u`zf**J`&?HRtZgvxVvCV*uy8h
z#WD7|(>e74uIO);+FD#^r;^k2>a==fCdBx}3vDeG8E^O;<1k&@X-;f<5&p6mv&fo^
zZnXyA*J@y~;dDc34^%c<YTf0DmbI_{s6AY01s-^{R`2qz74;U`QYoAMHR2!egYrNB
z0CDdn?*DgA84IE55Vv=-k+Xik@5%X0P8olBw==JR==(3+uEdmmUtmOCr0O!JtV@CU
E1+?H2LjV8(

literal 0
HcmV?d00001

diff --git a/NeuronNetwork/NeuronNetwork/NeuronNetwork.cpp b/NeuronNetwork/NeuronNetwork/NeuronNetwork.cpp
index ebbac6f..fc37a81 100644
--- a/NeuronNetwork/NeuronNetwork/NeuronNetwork.cpp
+++ b/NeuronNetwork/NeuronNetwork/NeuronNetwork.cpp
@@ -10,6 +10,11 @@ using namespace std;
 
 int main()
 {
+	srand((unsigned)time(NULL));
+	cout << "Press 's' to start!" << endl;
+	//while (cin.get() != 's');
+
+
 	cout << "initiating NN....";
 	Network* network = new Network();
 	cout << "   done" << endl;
@@ -18,15 +23,43 @@ int main()
 	network->CreateNetwork(3, new int[3]{ 10,3,1 });
 	cout << "   done" << endl;
 
-	cout << "FeedForwarding....";
-	network->FeedForward();
-	cout << "   done" << endl;
+	do {
+		cout << "Training size 1000 minibatch....";
+		for (int i = 0; i < 10000; i++)
+		{
+			//cout << "FeedForwarding....";
+			network->FeedForward();
+			//cout << "   done" << endl;
+
+			double randomTargetValue = rand() % 2;
+			//cout << "Adding random target value: " << randomTargetValue << " ...." << endl;
+			network->AddTargetValue(randomTargetValue);
+			//cout << "   done" << endl;
+		}
+		cout << "Printing state of the net...." << endl;
+		network->PrintNet();
+		cout << "   done" << endl;
+
+		cout << "BackwardPropagating....";
+		network->BackPropagate();
+		cout << "   done" << endl;
+
+		cout << "Printing state of the net...." << endl;
+		network->PrintNet();
+		cout << "   done" << endl;
+
+		cout << "press '0' to stop Learning" << endl;
+	} while (cin.get() != '0');
+
+
 
 	cout << "Deleting NN....";
 	delete network;
 	cout << "   done" << endl;
 
 	cout << "Task completed" << endl;
+	cout << "Press 'q' to quit" << endl;
+	while (cin.get() != 'q');
 }
 
 // Run program: Ctrl + F5 or Debug > Start Without Debugging menu
diff --git a/NeuronNetwork/NeuronNetwork/NeuronNetwork.rc b/NeuronNetwork/NeuronNetwork/NeuronNetwork.rc
new file mode 100644
index 0000000000000000000000000000000000000000..8315507dd60e5cc8de1c5c063f18df784c73da92
GIT binary patch
literal 2676
zcmdUx-%c7)5XL`moA?eZ+>~gf*7N}skQfn-fUyY)G2mJnM8lt$CVg2ieTmlJob{wD
zyGtW_vF7CLIXj%0GxN<iv;6&2R#}0LHB?^{-D=7VHA8leY^u2~HR7$JO-7(=U1`jm
zAfK@gkWE-kZ_cjIKH$x)4RyzvZLRAQx0KylL^|fz5vK?4Y$2h)tsg45)23N;=R&7v
zw)b?ZmfGs5szaTqr!&=%R@pbz(03#)p2ylp(qQlJHy`phymx*LR0o$uj6+bm)eZ5_
zK*w-s6ZHkpL!zz_r{PgU{*}3yk^kr5-r*bRy~IC%h<T49cFe)bm=qHCYTWS1d%O8s
zy>}MfnED%$sr9v)%hx3_e}>_AaO0<A%_EX29C={=QYnQC(^zE<9cmjdDr|toJsrTU
zsIppAQHL6<z^)z~YcS02Z$^-**~Z2hOzxr8M8D*2Z6IwT?XcDzTfb=wD?1B5W#`k_
zevU^+Xjea6WvADy&#<jFcR(cj<UWztUDey!;CZBX(XPHccT&qjgqLYqk4V44?AeK|
zThjV>LCNk>P?s*$g|Sg5|L13xPrEDo^xB(>u2Ke6koAmcSk0&Pk0jp=on;o@uSWCL
zhvMEC2JvaE2$%NZB#v==`Py4#=8<3D`b9QoFaIKYEc1m!+#B;SoAqP9KlYz7`Tb4i
F{Rto}JU9RV

literal 0
HcmV?d00001

diff --git a/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj b/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj
index 2453e45..8e96c94 100644
--- a/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj
+++ b/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj
@@ -155,12 +155,17 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="Corrector.cpp" />
     <ClCompile Include="Layer.cpp" />
     <ClCompile Include="Network.cpp" />
     <ClCompile Include="Neuron.cpp" />
     <ClCompile Include="NeuronNetwork.cpp" />
+    <ClCompile Include="TemplateFunctions.cpp" />
   </ItemGroup>
   <ItemGroup>
+    <ClInclude Include="Corrector.h" />
+    <ClInclude Include="Define.h" />
+    <ClInclude Include="TemplateFunctions.h" />
     <ClInclude Include="Layer.h" />
     <ClInclude Include="Network.h" />
     <ClInclude Include="Neuron.h" />
diff --git a/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj.filters b/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj.filters
index 1c8ac01..0f2f7b0 100644
--- a/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj.filters
+++ b/NeuronNetwork/NeuronNetwork/NeuronNetwork.vcxproj.filters
@@ -13,36 +13,60 @@
       <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
       <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
     </Filter>
-    <Filter Include="Source Files\network">
+    <Filter Include="Header Files\Templates">
+      <UniqueIdentifier>{ef541d2d-f2dd-411c-af78-faeb183e1940}</UniqueIdentifier>
+    </Filter>
+    <Filter Include="Header Files\Network">
+      <UniqueIdentifier>{72b6673e-e6f3-46d4-ab56-149712f2225a}</UniqueIdentifier>
+    </Filter>
+    <Filter Include="Source Files\Network">
       <UniqueIdentifier>{6c96a808-235d-4b2a-9c4d-3ce71c50e7f3}</UniqueIdentifier>
     </Filter>
+    <Filter Include="Source Files\Template">
+      <UniqueIdentifier>{d501b795-37f7-4da7-84a9-75b4b39d9c62}</UniqueIdentifier>
+    </Filter>
   </ItemGroup>
   <ItemGroup>
     <ClCompile Include="NeuronNetwork.cpp">
       <Filter>Source Files</Filter>
     </ClCompile>
     <ClCompile Include="Neuron.cpp">
-      <Filter>Source Files\network</Filter>
+      <Filter>Source Files\Network</Filter>
     </ClCompile>
     <ClCompile Include="Network.cpp">
-      <Filter>Source Files\network</Filter>
+      <Filter>Source Files\Network</Filter>
     </ClCompile>
     <ClCompile Include="Layer.cpp">
-      <Filter>Source Files\network</Filter>
+      <Filter>Source Files\Network</Filter>
+    </ClCompile>
+    <ClCompile Include="TemplateFunctions.cpp">
+      <Filter>Source Files\Template</Filter>
+    </ClCompile>
+    <ClCompile Include="Corrector.cpp">
+      <Filter>Source Files\Network</Filter>
     </ClCompile>
   </ItemGroup>
   <ItemGroup>
-    <ClInclude Include="Neuron.h">
-      <Filter>Header Files</Filter>
-    </ClInclude>
     <ClInclude Include="NeuronNetwork.h">
       <Filter>Header Files</Filter>
     </ClInclude>
-    <ClInclude Include="Network.h">
-      <Filter>Header Files</Filter>
+    <ClInclude Include="Neuron.h">
+      <Filter>Header Files\Network</Filter>
     </ClInclude>
     <ClInclude Include="Layer.h">
-      <Filter>Header Files</Filter>
+      <Filter>Header Files\Network</Filter>
+    </ClInclude>
+    <ClInclude Include="Network.h">
+      <Filter>Header Files\Network</Filter>
+    </ClInclude>
+    <ClInclude Include="TemplateFunctions.h">
+      <Filter>Header Files\Templates</Filter>
+    </ClInclude>
+    <ClInclude Include="Define.h">
+      <Filter>Header Files\Templates</Filter>
+    </ClInclude>
+    <ClInclude Include="Corrector.h">
+      <Filter>Header Files\Network</Filter>
     </ClInclude>
   </ItemGroup>
   <ItemGroup>
diff --git a/NeuronNetwork/NeuronNetwork/TemplateFunctions.cpp b/NeuronNetwork/NeuronNetwork/TemplateFunctions.cpp
new file mode 100644
index 0000000..0b223ce
--- /dev/null
+++ b/NeuronNetwork/NeuronNetwork/TemplateFunctions.cpp
@@ -0,0 +1,33 @@
+#include "TemplateFunctions.h"
+
+const double Activate(const double value, ActivationFunction function)
+{
+	switch (function)
+	{
+	case Sigmoid:
+		return value / (1 + abs(value));
+	case Tangh:
+		break;
+	case RLU:
+		break;
+	case LeakRLU:
+		break;
+	}
+	return -1.0;
+}
+
+const double Derive(const double value, ActivationFunction function)
+{
+	switch (function)
+	{
+	case Sigmoid:
+		return value * (1 - value);
+	case Tangh:
+		break;
+	case RLU:
+		break;
+	case LeakRLU:
+		break;
+	}
+	return -1.0;
+}
diff --git a/NeuronNetwork/NeuronNetwork/TemplateFunctions.h b/NeuronNetwork/NeuronNetwork/TemplateFunctions.h
new file mode 100644
index 0000000..6a2388d
--- /dev/null
+++ b/NeuronNetwork/NeuronNetwork/TemplateFunctions.h
@@ -0,0 +1,20 @@
+#pragma once
+#include <cmath>
+#include <vector>
+
+using namespace std;
+
+enum ActivationFunction
+{
+	// f(x)  = x / ( 1 + |x| )
+	// f'(x) = f(x)*(1 - f(x))
+	Sigmoid,
+	Tangh,
+	RLU,
+	LeakRLU
+};
+
+const double Activate(const double value, ActivationFunction function);
+const double Derive(const double value, ActivationFunction function);
+
+template<typename T> T RecursiveAddition(const vector<T> elements);
diff --git a/NeuronNetwork/NeuronNetwork/resource.h b/NeuronNetwork/NeuronNetwork/resource.h
new file mode 100644
index 0000000..cd3f5f9
--- /dev/null
+++ b/NeuronNetwork/NeuronNetwork/resource.h
@@ -0,0 +1,14 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by NeuronNetwork.rc
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        101
+#define _APS_NEXT_COMMAND_VALUE         40001
+#define _APS_NEXT_CONTROL_VALUE         1001
+#define _APS_NEXT_SYMED_VALUE           101
+#endif
+#endif
-- 
GitLab