using System;
using System.Collections.Generic;
using System.Common;
using System.Globalization;
using System.Reflection;
using System.Text;
using System.Xml;
namespace NeuralNetworks
{
/// <summary>
/// A single artificial neuron: holds one weight per input synapse plus a bias
/// weight, and pushes the weighted sum through an activation function.
/// Supports XML (de)serialization of its weights, bias and activation function.
/// </summary>
public class Neuron
{
    //Shared randomizer for initial weight generation. The parameterless
    //Random() constructor is time-seeded with far more entropy than the
    //previous DateTime.Now.Millisecond seed (only 1000 distinct values).
    private static Random m_Randomizer = new Random();

    /// <summary>Raised after Compute() produces a new output value.</summary>
    public event OnOutputComputed OutputComputed;
    /// <summary>Raised during serialization/deserialization to report progress.</summary>
    public event OnIOActivityProgress IOProgress;

    private int m_Synapses;                 //Number of inputs, excluding the bias.
    private IActivationFunction m_Function; //Activation applied to the weighted sum.
    private double[] m_Weights;             //m_Synapses + 1 entries; the last one is the bias weight.
    private double m_Bias;                  //Bias input value, multiplied by the last weight.
    private double m_Output = 0;            //Output of the most recent Compute() call.

    /// <summary>Number of input synapses (bias excluded).</summary>
    public int Synapses { get { return m_Synapses; } }
    /// <summary>Activation function used by this neuron.</summary>
    public IActivationFunction Function { get { return m_Function; } }
    /// <summary>Number of weights (synapses + 1, the extra one being the bias weight), or 0 if uninitialized.</summary>
    public int Weights { get { return m_Weights != null ? m_Weights.Length : 0; } }
    /// <summary>Bias input value.</summary>
    public double Bias { get { return m_Bias; } set { m_Bias = value; } }
    /// <summary>Output of the most recent Compute() call.</summary>
    public double Output { get { return m_Output; } }
    /// <summary>Gets or sets a single weight by index (index m_Synapses is the bias weight).</summary>
    public double this[int index] { get { return m_Weights[index]; } set { m_Weights[index] = value; } }

    /// <summary>Creates a neuron by loading its definition from an XML file.</summary>
    /// <param name="full_path">Path of the XML file to read.</param>
    public Neuron(string full_path)
    {
        try
        {
            //using guarantees the reader is closed even if Deserialize throws.
            using (XmlTextReader reader = new XmlTextReader(full_path))
            {
                Deserialize(reader, null);
            }
        }
        catch (Exception exc)
        {
            Console.WriteLine("Exception caught while creating neural net layer data.\n\tSource: " + exc.Source + "\n\tMessage: " + exc.Message);
        }
    }

    /// <summary>Creates a neuron by reading its definition from an open XML reader.</summary>
    public Neuron(XmlReader reader)
    {
        Deserialize(reader, null);
    }

    /// <summary>
    /// Creates a neuron from an XML reader, forcing the given activation
    /// function instead of the one stored in the XML (if any).
    /// </summary>
    public Neuron(XmlReader reader, IActivationFunction function)
    {
        Deserialize(reader, function);
    }

    /// <summary>Creates a neuron with random weights and a linear activation function.</summary>
    public Neuron(int synapses, double bias)
        : this(synapses, bias, new ActivationFunctions.LinearFunction())
    {
    }

    /// <summary>Creates a neuron with explicit weights (the last entry is the bias weight).</summary>
    public Neuron(int synapses, double bias, IActivationFunction function, double[] weights)
    {
        Initialize(synapses, bias, function, weights);
    }

    /// <summary>Creates a neuron with random weights in [0,1].</summary>
    public Neuron(int synapses, double bias, IActivationFunction function)
    {
        Initialize(synapses, bias, function);
    }

    /// <summary>
    /// Initializes the neuron from explicit weights.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// If synapses is not strictly positive, function or weights is null, or
    /// weights.Length != synapses + 1.
    /// </exception>
    private void Initialize(int synapses, double bias, IActivationFunction function, double[] weights)
    {
        if (synapses < 1)
            throw new ArgumentException("Synapses must be a strictly positive value", "synapses");
        else if (function == null)
            throw new ArgumentException("Function cannot be null", "function");
        else if (weights == null)
            throw new ArgumentException("Weights cannot be null", "weights");
        else if (weights.Length != synapses + 1)
            throw new ArgumentException("Weights must be as much as neuron synapses plus bias", "weights");
        m_Synapses = synapses;
        m_Function = function;
        //BUG FIX: this overload previously never stored the bias, so neurons
        //built from explicit weights (including every deserialized neuron)
        //silently lost their bias value.
        m_Bias = bias;
        m_Weights = new double[weights.Length];
        weights.CopyTo(m_Weights, 0);
    }

    /// <summary>
    /// Initializes the neuron with random weights, each drawn from [0,1].
    /// </summary>
    /// <exception cref="ArgumentException">
    /// If synapses is not strictly positive or function is null.
    /// </exception>
    private void Initialize(int synapses, double bias, IActivationFunction function)
    {
        if (synapses < 1)
            throw new ArgumentException("Synapses must be a strictly positive value", "synapses");
        else if (function == null)
            throw new ArgumentException("Function cannot be null", "function");
        m_Synapses = synapses;
        m_Function = function;
        m_Bias = bias;
        m_Weights = new double[synapses + 1];
        for (int i = 0; i < synapses + 1; i++)
            m_Weights[i] = m_Randomizer.NextDouble(); //Every weight is initialized in the interval [0,1]
    }

    /// <summary>
    /// Computes the neuron output: activation(sum(w[i] * input[i]) + w[n] * bias).
    /// Raises OutputComputed after the value is available.
    /// </summary>
    /// <param name="input">Input vector; its length must equal Synapses.</param>
    /// <returns>The neuron output (also available through Output).</returns>
    /// <exception cref="ArgumentNullException">If input is null.</exception>
    /// <exception cref="ArgumentException">If input length differs from Synapses.</exception>
    public double Compute(double[] input)
    {
        if (input == null)
            throw new ArgumentNullException("input");
        if (input.Length != m_Synapses)
            throw new ArgumentException("Inputs array length mismatch", "input");
        double ret = 0;
        for (int i = 0; i < input.Length; i++)
            ret += m_Weights[i] * input[i];
        ret += m_Weights[input.Length] * m_Bias; //The extra weight scales the bias input.
        m_Output = m_Function.Compute(ret);
        m_Output = OnOutputComputed(m_Output);   //Subclass hook (e.g. thresholding).
        if (OutputComputed != null)
            OutputComputed(input, m_Output);
        return m_Output;
    }

    /// <summary>
    /// Post-processing hook invoked after the activation function; the default
    /// implementation is the identity. Override to implement a threshold.
    /// </summary>
    protected virtual double OnOutputComputed(double output)
    {
        return output;
    }

    /// <summary>
    /// Returns "[w0 w1 ... | wBias] (Bias: b)" followed by the function description.
    /// </summary>
    public override string ToString()
    {
        //StringBuilder avoids O(n^2) string concatenation in the loop.
        StringBuilder sb = new StringBuilder("[");
        if (m_Weights != null)
        {
            for (int i = 0; i < m_Weights.Length; i++)
            {
                //" | " separates the bias weight from the input weights.
                sb.Append(i != 0 ? (i != m_Synapses ? " " : " | ") : "");
                sb.Append(m_Weights[i].ToString());
            }
        }
        else
            sb.Append("NULL");
        sb.Append("] (Bias: ").Append(m_Bias.ToString()).Append(")\t").Append(m_Function.ToString());
        return sb.ToString();
    }

    /// <summary>
    /// Two neurons are equal when they have the same weights, bias and
    /// activation function (weight count equality implies same synapse count).
    /// </summary>
    public override bool Equals(object obj)
    {
        if (obj == this)
            return true;
        else
        {
            Neuron n = obj as Neuron;
            if (n == null)
                return false;
            if (n.m_Weights.Length != m_Weights.Length)
                return false;
            if (n.m_Bias != m_Bias)
                return false;
            if (!n.m_Function.Equals(m_Function))
                return false;
            for (int i = 0; i < m_Weights.Length; i++)
            {
                if (m_Weights[i] != n.m_Weights[i])
                    return false;
            }
            return true;
        }
    }

    /// <summary>Hash code combining weights, bias and activation function, consistent with Equals.</summary>
    public override int GetHashCode()
    {
        int hash = 0;
        for (int i = 0; i < m_Weights.Length; i++)
            hash += m_Weights[i].GetHashCode();
        hash += m_Function.GetHashCode() + m_Bias.GetHashCode();
        return hash;
    }

    /// <summary>
    /// Writes this neuron as a &lt;neuron&gt; element (attributes, optional
    /// &lt;function&gt; child, one &lt;weight&gt; child per weight).
    /// </summary>
    /// <param name="writer">Open XML writer positioned where the element should go.</param>
    /// <param name="function_also">True to also serialize the activation function.</param>
    public void Serialize(XmlWriter writer, bool function_also)
    {
        writer.WriteStartElement("neuron");
        //Invariant culture guarantees the file round-trips on machines whose
        //locale uses a different decimal separator (e.g. "0,5" vs "0.5").
        writer.WriteAttributeString("inputs", m_Synapses.ToString(CultureInfo.InvariantCulture));
        writer.WriteAttributeString("bias", m_Bias.ToString(CultureInfo.InvariantCulture));
        writer.WriteAttributeString("weights", m_Weights.Length.ToString(CultureInfo.InvariantCulture));
        if (IOProgress != null)
            IOProgress("Neuron main data saved...");
        if (function_also)
        {
            writer.WriteStartElement("function");
            writer.WriteAttributeString("type", m_Function.GetType().ToString());
            m_Function.SerializeParameters(writer);
            writer.WriteEndElement();
            if (IOProgress != null)
                IOProgress("Neuron function saved...");
        }
        for (int i = 0; i < m_Weights.Length; i++)
        {
            writer.WriteStartElement("weight");
            writer.WriteAttributeString("value", m_Weights[i].ToString(CultureInfo.InvariantCulture));
            if (i == m_Synapses)
                writer.WriteAttributeString("is_bias_weight", true.ToString());
            writer.WriteEndElement();
            if (IOProgress != null)
                IOProgress("Neuron weight " + (i + 1).ToString() + "/" + m_Weights.Length.ToString() + " saved...");
        }
        writer.WriteEndElement();
    }

    /// <summary>Reads this neuron back from XML, instantiating the stored activation function.</summary>
    public void Deserialize(XmlReader reader)
    {
        Deserialize(reader, null);
    }

    /// <summary>
    /// Reads this neuron back from a &lt;neuron&gt; element. On failure the
    /// neuron is reset to a 1-synapse, zero-bias state with the given (or a
    /// linear) activation function.
    /// </summary>
    /// <param name="reader">Open XML reader.</param>
    /// <param name="function">
    /// When non-null, this function is used and any &lt;function&gt; element in
    /// the XML is ignored; when null, the function is instantiated from the XML
    /// (falling back to LinearFunction if none is stored).
    /// </param>
    public void Deserialize(XmlReader reader, IActivationFunction function)
    {
        try
        {
            int inputs = 0;
            double bias = 0;
            double[] weights = null;
            int i = 0;
            while (reader.Read())
            {
                switch (reader.NodeType)
                {
                    case XmlNodeType.Element:
                    {
                        if (reader.Name == "neuron")
                        {
                            //Invariant culture matches Serialize; see comment there.
                            inputs = int.Parse(reader["inputs"], CultureInfo.InvariantCulture);
                            bias = double.Parse(reader["bias"], CultureInfo.InvariantCulture);
                            weights = new double[int.Parse(reader["weights"], CultureInfo.InvariantCulture)];
                            if (IOProgress != null)
                                IOProgress("Neuron main data loaded...");
                        }
                        else if (reader.Name == "weight")
                        {
                            weights[i] = double.Parse(reader["value"], CultureInfo.InvariantCulture);
                            i++;
                            if (IOProgress != null)
                                IOProgress("Neuron weight " + i.ToString() + "/" + weights.Length.ToString() + " loaded...");
                        }
                        else if (reader.Name == "function")
                        {
                            if (function == null) //If function is not null, function was passed as parameter
                            {
                                Type f_type = Type.GetType(reader["type"]);
                                //Activator explicitly requests the parameterless
                                //constructor, instead of blindly invoking the
                                //first one reflection happens to return.
                                function = (IActivationFunction)Activator.CreateInstance(f_type);
                                function.DeserializeParameters(reader);
                                if (IOProgress != null)
                                    IOProgress("Neuron function loaded...");
                            }
                        }
                        break;
                    }
                    case XmlNodeType.EndElement:
                    {
                        if (reader.Name == "neuron")
                        {
                            Initialize(inputs, bias, function != null ? function : new ActivationFunctions.LinearFunction(), weights);
                            return;
                        }
                        break;
                    }
                }
            }
        }
        catch (Exception exc)
        {
            Console.WriteLine("Exception caught while deserializing neuron data.\n\tSource: " + exc.Source + "\n\tMessage: " + exc.Message);
            //Fall back to a minimal valid neuron so the instance is never left half-built.
            Initialize(1, 0, function != null ? function : new ActivationFunctions.LinearFunction());
        }
    }

    /// <summary>
    /// Saves this neuron (including its activation function) to an XML file.
    /// </summary>
    /// <returns>True on success; false if an exception occurred (logged to console).</returns>
    public bool Save(string full_path)
    {
        try
        {
            //using guarantees the writer is flushed and closed even on failure.
            using (XmlTextWriter writer = new XmlTextWriter(full_path, Encoding.UTF8))
            {
                writer.Formatting = Formatting.Indented;
                writer.WriteStartDocument();
                Serialize(writer, true);
                writer.WriteEndDocument();
            }
            return true;
        }
        catch (Exception exc)
        {
            Console.WriteLine("Exception caught while saving neural net neuron data.\n\tSource: " + exc.Source + "\n\tMessage: " + exc.Message);
            return false;
        }
    }

    /// <summary>
    /// Loads this neuron from an XML file previously written by Save().
    /// </summary>
    /// <returns>True on success; false if an exception occurred (logged to console).</returns>
    public bool Load(string full_path)
    {
        try
        {
            //using guarantees the reader is closed even if Deserialize throws.
            using (XmlTextReader reader = new XmlTextReader(full_path))
            {
                Deserialize(reader, null);
            }
            return true;
        }
        catch (Exception exc)
        {
            Console.WriteLine("Exception caught while loading neural net layer data.\n\tSource: " + exc.Source + "\n\tMessage: " + exc.Message);
            return false;
        }
    }
}
}