|
/*##########################################################################
*
* BP1Layer.cs
* -------------------------------------------------------------------------
* By
* Murat FIRAT, June 2007
* muratti24@gmail.com
*
* -------------------------------------------------------------------------
* Last Update:
* July,4th 2007
*
* -------------------------------------------------------------------------
* Description:
* BP1Layer.cs Implements Single Layer Backpropagation Neural Network
*
* -------------------------------------------------------------------------
* Notes:
* If training process takes too long, modify (mostly increase) learning rate
* and initial weight.
*
* To modify initial weight, change x(0<x<1) and y(0<y<100) in the following
* code segment(in InitializeNetwork() function):
*
* PreInputLayer[i].Weights[j] = x +((double)rand.Next(0, y) / 100);
*
* -------------------------------------------------------------------------
###########################################################################*/
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;
using ANNBase;
namespace ANN
{
class BP1Layer
{
    private PreInput[] PreInputLayer;     // input neurons; each carries one weight per output neuron
    private Output[] OutputLayer;         // output neurons, parallel to OutputSet
    private int PreInputNum;              // number of input neurons
    private int OutputNum;                // number of output neurons
    private ArrayList OutputSet;          // distinct labels; OutputSet[i] is the label neuron i stands for
    private double LearningRate = 0.2;
    private double currentError = 99999;  // sentinel "not yet trained" value, replaced on first epoch
    private bool stopTraining = false;    // set via StopTraining to abort Train()

    /// <summary>
    /// Creates a single-layer backpropagation network with the given number of
    /// input and output neurons and small random initial weights.
    /// </summary>
    public BP1Layer(int preInputNum, int outputNum)
    {
        PreInputNum = preInputNum;
        OutputNum = outputNum;
        PreInputLayer = new PreInput[PreInputNum];
        OutputLayer = new Output[OutputNum];
        OutputSet = new ArrayList();
        InitializeNetwork();
    }

    /// <summary>
    /// Trains the network until the total squared error drops to MaxError or
    /// below, or until StopTraining is set.
    /// NOTE: if MaxError is unreachable for the given data / learning rate,
    /// this loop only terminates via StopTraining (see the file header notes).
    /// </summary>
    /// <param name="TrainingInputs">double[] patterns, parallel to TrainingOutputs.</param>
    /// <param name="TrainingOutputs">string label for each pattern.</param>
    /// <param name="MaxError">target upper bound for the total squared error.</param>
    public void Train(ArrayList TrainingInputs, ArrayList TrainingOutputs, double MaxError)
    {
        // Register every distinct label; neuron i learns to fire for OutputSet[i].
        foreach (string s in TrainingOutputs)
        {
            if (!OutputSet.Contains(s))
            {
                OutputSet.Add(s);
            }
        }
        stopTraining = false;
        do
        {
            for (int i = 0; i < TrainingInputs.Count; i++)
            {
                ForwardPropagate((double[])TrainingInputs[i], (string)TrainingOutputs[i]);
                BackPropagate();
            }
            currentError = GetTotalError(TrainingInputs, TrainingOutputs);
        } while (currentError > MaxError && !stopTraining);
    }

    /// <summary>
    /// Applies a pattern to the network and reports the two best-matching
    /// labels together with their output activations.
    /// (The OutputValueHight spelling is kept: it is part of the public
    /// signature and callers may pass it by name.)
    /// </summary>
    public void Recognize(double[] pattern, ref string MatchedHigh, ref double OutputValueHight,
        ref string MatchedLow, ref double OutputValueLow)
    {
        ApplyInput(pattern);
        ComputeOutputs();
        // BUGFIX: the original only updated the runner-up when a new maximum
        // appeared, so an output that was second-best but encountered AFTER
        // the maximum was lost. Track best and second-best explicitly.
        double max = -1;
        double secondMax = -1;
        for (int i = 0; i < OutputNum; i++)
        {
            double o = OutputLayer[i].output;
            if (o > max)
            {
                // Previous best becomes the runner-up.
                secondMax = max;
                MatchedLow = MatchedHigh;
                OutputValueLow = secondMax;
                max = o;
                MatchedHigh = (string)OutputSet[i];
                OutputValueHight = max;
            }
            else if (o > secondMax)
            {
                secondMax = o;
                MatchedLow = (string)OutputSet[i];
                OutputValueLow = o;
            }
        }
    }

    // Sigmoid (logistic) activation function.
    private double F(double x) { return (1 / (1 + Math.Exp(-x))); }

    /// <summary>
    /// Returns the total squared error, sum over all patterns and output
    /// neurons of (target - output)^2 / 2.
    /// </summary>
    public double GetTotalError(ArrayList TrainingInputs, ArrayList TrainingOutputs)
    {
        double total = 0.0;
        for (int i = 0; i < TrainingInputs.Count; i++)
        {
            ForwardPropagate((double[])TrainingInputs[i], (string)TrainingOutputs[i]);
            for (int j = 0; j < OutputNum; j++)
            {
                total += Math.Pow((OutputLayer[j].Target - OutputLayer[j].output), 2) / 2;
            }
        }
        return total;
    }

    /// <summary>
    /// Serializes the network state (labels, both layers) plus the two
    /// caller-supplied dimensions to FileName.
    /// NOTE(security): BinaryFormatter deserialization is inherently insecure
    /// and the type is removed in .NET 9; it is kept here only for
    /// compatibility with existing saved-network files.
    /// </summary>
    public void SaveNetwork(string FileName, int AvHeight, int AvWidth)
    {
        ArrayList AL = new ArrayList();
        AL.Add("1Layer");   // version tag checked by LoadNetwork
        AL.Add(OutputSet);
        AL.Add(PreInputLayer);
        AL.Add(OutputLayer);
        AL.Add(AvHeight);
        AL.Add(AvWidth);
        // BUGFIX: FileMode.Create (not OpenOrCreate) truncates an existing
        // file, so a smaller network cannot leave stale trailing bytes; the
        // 'using' closes the stream even when Serialize throws.
        using (FileStream FS = new FileStream(FileName, FileMode.Create))
        {
            BinaryFormatter BF = new BinaryFormatter();
            BF.Serialize(FS, AL);
        }
    }

    /// <summary>
    /// Restores a network previously written by SaveNetwork and returns the
    /// two stored dimensions through the ref parameters.
    /// </summary>
    /// <exception cref="InvalidDataException">
    /// the file contains a network of a different type (version tag mismatch).
    /// </exception>
    public void LoadNetwork(string FileName, ref int AvHeight, ref int AvWidth)
    {
        // BUGFIX: the original try/finally called FS.Close() even when the
        // FileStream constructor itself threw, turning the real I/O error
        // into a NullReferenceException. 'using' disposes only on success.
        using (FileStream FS = new FileStream(FileName, FileMode.Open))
        {
            BinaryFormatter BF = new BinaryFormatter();
            ArrayList AL = (ArrayList)BF.Deserialize(FS);
            string Ver = (string)AL[0];
            if (Ver != "1Layer")
            {
                // InvalidDataException is more specific than Exception and is
                // still caught by any caller catching Exception.
                throw new InvalidDataException("The Loaded Network Does Not Belong To 1 Layer Network But Belongs to " + Ver);
            }
            OutputSet = (ArrayList)AL[1];
            PreInputLayer = (PreInput[])AL[2];
            OutputLayer = (Output[])AL[3];
            AvHeight = (int)AL[4];
            AvWidth = (int)AL[5];
            // Keep the cached counts consistent with the loaded arrays.
            PreInputNum = PreInputLayer.Length;
            OutputNum = OutputLayer.Length;
        }
    }

    // Initialize the network weights to small random values (0.01 or 0.02).
    // See the file header: raise the base/range here if training stalls.
    private void InitializeNetwork()
    {
        Random rand = new Random();
        for (int i = 0; i < PreInputNum; i++)
        {
            PreInputLayer[i].Weights = new double[OutputNum];
            for (int j = 0; j < OutputNum; j++)
            {
                PreInputLayer[i].Weights[j] = 0.01 + ((double)rand.Next(0, 2) / 100);
            }
        }
    }

    // Gradient-descent weight update using the Error terms computed by the
    // most recent ForwardPropagate call.
    private void BackPropagate()
    {
        for (int j = 0; j < OutputNum; j++)
        {
            for (int i = 0; i < PreInputNum; i++)
            {
                PreInputLayer[i].Weights[j] += LearningRate * (OutputLayer[j].Error) * PreInputLayer[i].Value;
            }
        }
    }

    // Latches a pattern into the input layer (shared by Recognize and
    // ForwardPropagate, which previously duplicated this loop).
    private void ApplyInput(double[] pattern)
    {
        for (int i = 0; i < PreInputNum; i++)
        {
            PreInputLayer[i].Value = pattern[i];
        }
    }

    // Computes every output neuron's weighted input sum and sigmoid output
    // from the values currently latched into the input layer.
    private void ComputeOutputs()
    {
        for (int i = 0; i < OutputNum; i++)
        {
            double total = 0.0;
            for (int j = 0; j < PreInputNum; j++)
            {
                total += PreInputLayer[j].Value * PreInputLayer[j].Weights[i];
            }
            OutputLayer[i].InputSum = total;
            OutputLayer[i].output = F(total);
        }
    }

    // Runs the forward pass for one (pattern, label) pair, then fills in each
    // output neuron's Target (1.0 for the neuron whose label matches 'output',
    // else 0.0) and its delta-rule Error term (t - o) * o * (1 - o).
    private void ForwardPropagate(double[] pattern, string output)
    {
        ApplyInput(pattern);
        ComputeOutputs();
        for (int i = 0; i < OutputNum; i++)
        {
            OutputLayer[i].Target = (((string)OutputSet[i]) == output ? 1.0 : 0.0);
            OutputLayer[i].Error = (OutputLayer[i].Target - OutputLayer[i].output) * (OutputLayer[i].output) * (1 - OutputLayer[i].output);
        }
    }

    // Total error from the most recent training epoch (99999 before training).
    public double CurrentError
    {
        get
        {
            return currentError;
        }
    }

    // Set true (e.g. from a UI handler) to make Train() stop after the
    // current epoch instead of waiting for the error target.
    public bool StopTraining
    {
        set
        {
            stopTraining = value;
        }
    }
}
}
|
By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.
If a file you wish to view isn't highlighted, and is a text file (not binary), please
let us know and we'll add colourisation support for it.
Has a BS degree in computer science and works as a software engineer in Istanbul.