
C# Application to Create and Recognize Mouse Gestures (.NET)

17 Mar 2008 · CPOL · 5 min read
This program can create and recognize mouse gestures. The listing below is the back-propagation trainer used to teach the underlying neural network.

using System;
using System.Collections.Generic;
using System.Text;

namespace NeuralNetworks.Training
{
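	/// <summary>
	/// Trains a NeuralNetwork with the back-propagation algorithm, using per-layer
	/// learning rates, a momentum term and an error limit below which training is
	/// considered converged.
	/// </summary>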
	public class BackPropagationLearning : ISupervisedLearning
	{
		public event OnEpochEndedDelegate EpochEnded;
		private NeuralNetwork m_Network;
		private double[] m_LearningRates;
		private double m_Momentum = 0.0;
		private double m_ErrorLimit = 0.1;
		private bool m_Converged = false;
		private double[][] m_LayersErrors;
		private double[][][] m_LayersDeltaWeights;

		public double ErrorLimit { get { return m_ErrorLimit; } }
		public double[] LearningRates { get { return m_LearningRates; } set { m_LearningRates = value; } }
		public double Momentum { get { return m_Momentum; } set { m_Momentum = value; } }
		public bool Converged { get { return m_Converged; } }

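		/// <summary>
		/// Creates a trainer that applies the same learning rate to every layer;
		/// the second constructor accepts one learning rate per layer.
		/// </summary>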
		public BackPropagationLearning(NeuralNetwork net, double momentum, double learning_rate, double error_limit)
		{
			if (net == null)
				throw new ArgumentException("Neural network must not be null", "net");

			double[] learning_rates = new double[net.Layers];
			for (int i = 0; i < net.Layers; i++)
				learning_rates[i] = learning_rate;

			Initialize(net, momentum, learning_rates, error_limit);
		}

		public BackPropagationLearning(NeuralNetwork net, double momentum, double[] learning_rates, double error_limit)
		{
			Initialize(net, momentum, learning_rates, error_limit);
		}

		private void Initialize(NeuralNetwork net, double momentum, double[] learning_rates, double error_limit)
		{
			if (net == null)
				throw new ArgumentException("Neural network must not be null", "net");
			else if (learning_rates == null)
				throw new ArgumentException("Learning rates must not be null", "learning_rates");
			else if (learning_rates.Length != net.Layers)
				throw new ArgumentException("Learning rates must be as many as network layers", "learning_rates");

			m_Network = net;
			m_Momentum = momentum;
			m_LearningRates = learning_rates;
			m_ErrorLimit = error_limit;

			m_LayersErrors = new double[net.Layers][];
			m_LayersDeltaWeights = new double[net.Layers][][];

			for (int i = 0; i < net.Layers; i++)
			{
				Layer layer = net[i];
				m_LayersErrors[i] = new double[layer.Neurons];
				m_LayersDeltaWeights[i] = new double[layer.Neurons][];

				for (int j = 0; j < layer.Neurons; j++)
					m_LayersDeltaWeights[i][j] = new double[layer.NeuronsSynapses + 1];
			}
		}

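		/// <summary>
		/// Trains every input pattern towards the same target vector, for at most
		/// 'epochs' epochs or until the mean error falls below ErrorLimit.
		/// 'cycles' returns the number of epochs actually run.
		/// </summary>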
		public double Train(double[][] inputs, double[] output, int epochs, out int cycles)
		{
			if (inputs == null)
				throw new ArgumentException("Inputs must not be null", "inputs");
			else if (output == null)
				throw new ArgumentException("Outputs must not be null", "output");
			else if (epochs < 1)
				throw new ArgumentException("Epochs must be at least 1", "epochs");

			int i, j;
			double error = 0;
			m_Converged = false;
			for (i = 0; i < epochs && !m_Converged; i++)
			{
				error = 0;
				for (j = 0; j < inputs.Length; j++)
					error += Train(inputs[j], output);

				error /= inputs.Length;

				m_Converged = (error < m_ErrorLimit);

				if (EpochEnded != null)
					EpochEnded(error);
			}

			cycles = i;	//So we can know how many cycles have been computed
			return error;
		}

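		/// <summary>
		/// Trains each input pattern towards its corresponding target vector.
		/// EpochEnded is raised with the mean error after every epoch, and training
		/// stops early once that error falls below ErrorLimit.
		/// </summary>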
		public double Train(double[][] inputs, double[][] outputs, int epochs, out int cycles)
		{
			if (inputs == null)
				throw new ArgumentException("Inputs must not be null", "inputs");
			else if (outputs == null)
				throw new ArgumentException("Outputs must not be null", "outputs");
			else if (inputs.Length != outputs.Length)
				throw new ArgumentException("Outputs length must match inputs length", "outputs");
			else if (epochs < 1)
				throw new ArgumentException("Epochs must be at least 1", "epochs");

			int i, j;
			double error = 0;
			m_Converged = false;
			for (i = 0; i < epochs && !m_Converged; i++)
			{
				error = 0;
				for (j = 0; j < inputs.Length; j++)
					error += Train(inputs[j], outputs[j]);

				error /= inputs.Length; //Mean of mean square errors

				m_Converged = (error < m_ErrorLimit);

				if (EpochEnded != null)
					EpochEnded(error);
			}

			cycles = i;	//So we can know how many cycles have been computed
			return error;
		}

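		/// <summary>
		/// Performs one training step on a single pattern: forward pass, error
		/// back-propagation, weight-delta computation and weight update.
		/// Returns the mean squared error of the pattern.
		/// </summary>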
		public double Train(double[] input, double[] output)
		{
			if (input == null)
				throw new ArgumentException("Input must not be null", "input");
			else if (output == null)
				throw new ArgumentException("Output must not be null", "output");
			else if (input.Length != m_Network.Inputs)
				throw new ArgumentException("Input cannot be used as network input", "input");
				
			m_Network.Compute(input);

			double training_error = CalculateLayersErrors(output);

			CalculateWeightsUpdates(input);

			UpdateNetworkWeights();

			return training_error;
		}

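		/// <summary>
		/// Back-propagates the error from the output layer towards the input layer.
		/// For the output layer the local error is (target - output); for hidden layers
		/// it is the weighted sum of the next layer's errors. Both are multiplied by the
		/// derivative of the activation function. Returns the pattern's mean squared error.
		/// </summary>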
		private double CalculateLayersErrors(double[] expected_output)
		{
			Layer layer;
			int i, j, k, output_layer_index = m_Network.Layers - 1, next_index;
			double training_error = 0, g_value, derivate;

			for (i = output_layer_index; i >= 0; i--)
			{
				layer = m_Network[i];

				for (j = 0; j < layer.Neurons; j++)
				{
					if (i == output_layer_index)	//If computing error for output layer, g is the neuron error
					{
						g_value = expected_output[j] - layer[j].Output;
						training_error += (g_value * g_value);
					}
					else //If computing error for every other layer, g for layer 'i' is computed as Sum(layers[i+1].errors[k]*layers[i+1].neuron[k].weights[j]) for each k = [0, layers[i+1].neurons], j = [0, layers[i].neurons]
					{
						g_value = 0;
						next_index = i + 1;
						for (k = 0; k < m_Network[next_index].Neurons; k++)
							g_value += m_LayersErrors[next_index][k] * m_Network[next_index][k][j];
					}

					derivate = layer[j].Function.ComputeDerivateByOutput(layer[j].Output);
					m_LayersErrors[i][j] = derivate * g_value;
				}
			}

			return training_error / m_Network.OutputLayer.Neurons;	//Mean squared error over the output neurons
		}

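		/// <summary>
		/// Computes the delta for every weight (plus the bias weight of each neuron):
		/// the new gradient term is blended with the previously applied delta through
		/// the momentum factor, then scaled by the layer's learning rate.
		/// </summary>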
		private void CalculateWeightsUpdates(double[] input)
		{
			int i, j, k, bias_weight_index;
			Layer layer;
			double[] neuron_deltas, layer_input = input;
			double new_delta;

			for (i = 0; i < m_Network.Layers; i++)
			{
				layer = m_Network[i];

				for (j = 0; j < layer.Neurons; j++)
				{
					neuron_deltas = m_LayersDeltaWeights[i][j];
					bias_weight_index = layer[j].Synapses;

					for (k = 0; k < layer[j].Synapses + 1; k++)
					{
						if (k != bias_weight_index)
							new_delta = m_LayersErrors[i][j] * layer_input[k];
						else
							new_delta = m_LayersErrors[i][j] * layer[j].Bias;	//Bias weight update

						neuron_deltas[k] = m_Momentum * neuron_deltas[k] + (1 - m_Momentum) * new_delta;
						neuron_deltas[k] *= m_LearningRates[i];
					}
				}

				layer_input = m_Network[i].Output;	//Next input is current output...
			}
		}

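		/// <summary>
		/// Adds the computed deltas to the corresponding network weights,
		/// including the trailing bias weight of each neuron.
		/// </summary>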
		private void UpdateNetworkWeights()
		{
			int i, j, k;
			Layer layer;

			for (i = 0; i < m_Network.Layers; i++)
			{
				layer = m_Network[i];

				for (j = 0; j < layer.Neurons; j++)
				{
					for (k = 0; k < layer[j].Synapses + 1; k++)
						layer[j][k] += m_LayersDeltaWeights[i][j][k];
				}
			}
		}
	}
}
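
Here is a minimal usage sketch. The NeuralNetwork constructor, its namespace and the exact OnEpochEndedDelegate signature are assumed for illustration (the EpochEnded(error) call in Train suggests a single double parameter); adapt them to the classes shipped with the article.

using System;
using NeuralNetworks;           // assumed namespace of NeuralNetwork and Layer
using NeuralNetworks.Training;

class BackPropagationExample
{
	static void Main()
	{
		// Hypothetical constructor: 2 inputs, one hidden layer of 3 neurons, 1 output neuron.
		NeuralNetwork net = new NeuralNetwork(2, 3, 1);

		// Momentum 0.9, learning rate 0.25 for every layer, stop when the mean error drops below 0.01.
		BackPropagationLearning teacher = new BackPropagationLearning(net, 0.9, 0.25, 0.01);

		// Assumed delegate signature: void (double error).
		teacher.EpochEnded += delegate(double error) { Console.WriteLine("Epoch error: {0:F6}", error); };

		// XOR training set.
		double[][] inputs  = { new double[] { 0, 0 }, new double[] { 0, 1 }, new double[] { 1, 0 }, new double[] { 1, 1 } };
		double[][] outputs = { new double[] { 0 },    new double[] { 1 },    new double[] { 1 },    new double[] { 0 } };

		int cycles;
		double error = teacher.Train(inputs, outputs, 10000, out cycles);
		Console.WriteLine("Converged: {0} after {1} epochs (error {2:F6})", teacher.Converged, cycles, error);
	}
}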

License

This article, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)


Written By
Software Developer (Senior), Apex s.r.l.
Italy
I got my Master's Degree in Computer Science (Engineering) at the University of Siena (Italy), but I'm from Rieti (a small town near Rome).
My hobbies are RPGs, MMORPGs, programming and 3D graphics.
At the moment I'm employed at Apex s.r.l. (Modena, Italy) as a senior software developer, working on a WPF/WCF project in Rome.
