Click here to Skip to main content
Click here to Skip to main content
Articles » Languages » C# » General » Downloads
 
Add your own
alternative version

Image Recognition with Neural Networks

, 30 Oct 2007 CPOL
This article contains a brief description of the backpropagation artificial neural network and its implementation for image recognition.
backpropagation2_demo.zip
BackPropagation.exe
ICONS
adobe.bmp
mozilla.bmp
ms word.bmp
netbeans.bmp
notepad.bmp
quest.bmp
studio.bmp
Thumbs.db
winamp.bmp
PATTERNS
0.bmp
1.bmp
2.bmp
3.bmp
4.bmp
5.bmp
6.bmp
7.bmp
8.bmp
9.bmp
A.bmp
B.bmp
C.bmp
D.bmp
E.bmp
F.bmp
G.bmp
H.bmp
I.bmp
J.bmp
K.bmp
L.bmp
M.bmp
N.bmp
O.bmp
P.bmp
Q.bmp
R.bmp
S.bmp
T.bmp
Thumbs.db
U.bmp
V.bmp
W.bmp
X.bmp
Y.bmp
Z.bmp
BackPropagation2_src.zip
Properties
Settings.settings
BackPropagation.suo
bin
Debug
06_19_BackPropagation.exe
06_19_BackPropagation.vshost.exe
ICONS
adobe.bmp
mozilla.bmp
ms word.bmp
netbeans.bmp
notepad.bmp
quest.bmp
studio.bmp
Thumbs.db
winamp.bmp
PATTERNS
0.bmp
1.bmp
2.bmp
3.bmp
4.bmp
5.bmp
6.bmp
7.bmp
8.bmp
9.bmp
A.bmp
B.bmp
C.bmp
D.bmp
E.bmp
F.bmp
G.bmp
H.bmp
I.bmp
J.bmp
K.bmp
L.bmp
M.bmp
N.bmp
O.bmp
P.bmp
Q.bmp
R.bmp
S.bmp
T.bmp
Thumbs.db
U.bmp
V.bmp
W.bmp
X.bmp
Y.bmp
Z.bmp
Lib
backpropagation_demo.zip
06_19_BackPropagation.exe
0
0.bmp
Thumbs.db
1
1.bmp
Thumbs.db
2
2.bmp
Thumbs.db
3
3.bmp
Thumbs.db
4
4.bmp
Thumbs.db
5
5.bmp
Thumbs.db
6
6.bmp
Thumbs.db
7
7.bmp
Thumbs.db
8
8.bmp
Thumbs.db
9
9.bmp
Thumbs.db
A
A.bmp
Thumbs.db
B
B.bmp
Thumbs.db
C
C.bmp
Thumbs.db
D
D.bmp
Thumbs.db
E
E.bmp
Thumbs.db
F
F.bmp
Thumbs.db
G
G.bmp
Thumbs.db
H
H.bmp
Thumbs.db
I
I.bmp
Thumbs.db
J
J.bmp
Thumbs.db
K
K.bmp
Thumbs.db
L
L.bmp
Thumbs.db
M
M.bmp
Thumbs.db
N
N.bmp
Thumbs.db
O
O.bmp
Thumbs.db
P
P.bmp
Thumbs.db
Q
Q.bmp
Thumbs.db
R
R.bmp
Thumbs.db
S
S.bmp
Thumbs.db
T
T.bmp
Thumbs.db
Thumbs.db
U
Thumbs.db
U.bmp
V
Thumbs.db
V.bmp
W
Thumbs.db
W.bmp
X
Thumbs.db
X.bmp
Y
Thumbs.db
Y.bmp
Z
Thumbs.db
Z.bmp
Adobe
adobe reader.bmp
Thumbs.db
Mozilla
Thumbs.db
untitled.bmp
NetB
Thumbs.db
untitled6.bmp
NoteP
Thumbs.db
untitled4.bmp
Office
ms word.bmp
Thumbs.db
Printer
Thumbs.db
untitled.bmp
Quest
Thumbs.db
untitled5.bmp
Studio
Thumbs.db
untitled3.bmp
Winamp
Thumbs.db
untitled2.bmp
Winrar
Thumbs.db
untitled.bmp
BackPropagation_src.zip
BackPropagation.suo
06_19_BackPropagation.exe
06_19_BackPropagation.vshost.exe
Adobe
adobe reader.bmp
Thumbs.db
Mozilla
Thumbs.db
untitled.bmp
NetB
Thumbs.db
untitled6.bmp
NoteP
Thumbs.db
untitled4.bmp
Office
ms word.bmp
Thumbs.db
Printer
Thumbs.db
untitled.bmp
Quest
Thumbs.db
untitled5.bmp
Studio
Thumbs.db
untitled3.bmp
Winamp
Thumbs.db
untitled2.bmp
Winrar
Thumbs.db
untitled.bmp
0
0.bmp
Thumbs.db
1
1.bmp
Thumbs.db
2
2.bmp
Thumbs.db
3
3.bmp
Thumbs.db
4
4.bmp
Thumbs.db
5
5.bmp
Thumbs.db
6
6.bmp
Thumbs.db
7
7.bmp
Thumbs.db
8
8.bmp
Thumbs.db
9
9.bmp
Thumbs.db
A
A.bmp
Thumbs.db
B
B.bmp
Thumbs.db
C
C.bmp
Thumbs.db
D
D.bmp
Thumbs.db
E
E.bmp
Thumbs.db
F
F.bmp
Thumbs.db
G
G.bmp
Thumbs.db
H
H.bmp
Thumbs.db
I
I.bmp
Thumbs.db
J
J.bmp
Thumbs.db
K
K.bmp
Thumbs.db
L
L.bmp
Thumbs.db
M
M.bmp
Thumbs.db
N
N.bmp
Thumbs.db
O
O.bmp
Thumbs.db
P
P.bmp
Thumbs.db
Q
Q.bmp
Thumbs.db
R
R.bmp
Thumbs.db
S
S.bmp
Thumbs.db
T
T.bmp
Thumbs.db
Thumbs.db
U
Thumbs.db
U.bmp
V
Thumbs.db
V.bmp
W
Thumbs.db
W.bmp
X
Thumbs.db
X.bmp
Y
Thumbs.db
Y.bmp
Z
Thumbs.db
Z.bmp
Settings.settings
bpsimplified_demo.zip
sample.net
Adobe.bmp
Mozilla.bmp
NBeans.bmp
Npad.bmp
Quest.bmp
VStudio.bmp
Winamp.bmp
Word.bmp
0.bmp
1.bmp
2.bmp
3.bmp
4.bmp
5.bmp
6.bmp
7.bmp
8.bmp
9.bmp
A.bmp
B.bmp
C.bmp
D.bmp
E.bmp
F.bmp
G.bmp
H.bmp
I.bmp
J.bmp
K.bmp
L.bmp
M.bmp
N.bmp
O.bmp
P.bmp
Q.bmp
R.bmp
S.bmp
T.bmp
U.bmp
V.bmp
W.bmp
X.bmp
Y.bmp
Z.bmp
BPSimplified.exe
bpsimplified_src.zip
BPSimplified
bin
Debug
BPSimplified.exe
ICONS
Adobe.bmp
Mozilla.bmp
NBeans.bmp
Npad.bmp
Quest.bmp
VStudio.bmp
Winamp.bmp
Word.bmp
PATTERNS
0.bmp
1.bmp
2.bmp
3.bmp
4.bmp
5.bmp
6.bmp
7.bmp
8.bmp
9.bmp
A.bmp
B.bmp
C.bmp
D.bmp
E.bmp
F.bmp
G.bmp
H.bmp
I.bmp
J.bmp
K.bmp
L.bmp
M.bmp
N.bmp
O.bmp
P.bmp
Q.bmp
R.bmp
S.bmp
T.bmp
U.bmp
V.bmp
W.bmp
X.bmp
Y.bmp
Z.bmp
sample.net
Lib
Properties
Settings.settings
#region Copyright (c), Some Rights Reserved
/*##########################################################################
 * 
 * BP3Layer.cs
 * -------------------------------------------------------------------------
 * By
 * Murat FIRAT, June 2007
 * 
 * -------------------------------------------------------------------------
 * Description:
 * BP3Layer.cs Implements Three Layer Backpropagation Neural Network
 * 
 * -------------------------------------------------------------------------
 * Notes:
 * If training process takes too long, modify (mostly increase) learning rate 
 * and initial weight.
 * 
 * To modify initial weight, change x(0<x<1) and y(0<y<100) in the following
 * code segment(in InitializeNetwork(..) function):
 * 
 * PreInputLayer[i].Weights[j] = x +((double)rand.Next(0, y) / 100);                  
 * InputLayer[i].Weights[j] = x + ((double)rand.Next(0, y) / 100);
 * HiddenLayer[i].Weights[j] = x + ((double)rand.Next(0, y) / 100);
 * 
 * -------------------------------------------------------------------------
 ###########################################################################*/
#endregion

using System;
using System.Collections.Generic;
using System.Text;

namespace BPSimplified.Lib
{
    [Serializable]
    class BP3Layer<T> : IBackPropagation<T> where T : IComparable<T>
    {
        // Neuron counts for each of the four layers (pre-input holds the raw
        // pattern values; input/hidden/output are the three trainable layers).
        private int PreInputNum;
        private int InputNum;
        private int HiddenNum;
        private int OutputNum;

        private PreInput[] PreInputLayer;
        private Input[] InputLayer;
        private Hidden[] HiddenLayer;
        private Output<T>[] OutputLayer;

        // Step size used by BackPropagate() when adjusting weights.
        private double learningRate = 0.2;

        /// <summary>
        /// Allocates the four neuron layers with the given sizes.
        /// Weights are not created here; call InitializeNetwork(..) before training.
        /// </summary>
        /// <param name="preInputNum">Number of raw input values (e.g. pixels).</param>
        /// <param name="inputNum">Number of neurons in the first trainable layer.</param>
        /// <param name="hiddenNum">Number of neurons in the hidden layer.</param>
        /// <param name="outputNum">Number of output neurons (one per pattern class).</param>
        public BP3Layer(int preInputNum, int inputNum, int hiddenNum, int outputNum)
        {
            PreInputNum = preInputNum;
            InputNum = inputNum;
            HiddenNum = hiddenNum;
            OutputNum = outputNum;

            PreInputLayer = new PreInput[PreInputNum];
            InputLayer = new Input[InputNum];
            HiddenLayer = new Hidden[HiddenNum];
            OutputLayer = new Output<T>[OutputNum];
        }

        #region IBackPropagation<T> Members

        /// <summary>
        /// Propagates the output-layer errors (set by ForwardPropagate) backwards
        /// through the network, then updates all three weight matrices using the
        /// standard delta rule scaled by <see cref="LearningRate"/>.
        /// </summary>
        public void BackPropagate()
        {
            int i, j;
            double total;

            // Back-propagate errors from the output layer to the hidden layer.
            for (i = 0; i < HiddenNum; i++)
            {
                total = 0.0;
                for (j = 0; j < OutputNum; j++)
                {
                    total += HiddenLayer[i].Weights[j] * OutputLayer[j].Error;
                }
                HiddenLayer[i].Error = total;
            }

            // Back-propagate errors from the hidden layer to the input layer.
            for (i = 0; i < InputNum; i++)
            {
                total = 0.0;
                for (j = 0; j < HiddenNum; j++)
                {
                    total += InputLayer[i].Weights[j] * HiddenLayer[j].Error;
                }
                InputLayer[i].Error = total;
            }

            // Update the first weight matrix (pre-input -> input).
            for (i = 0; i < InputNum; i++)
            {
                for (j = 0; j < PreInputNum; j++)
                {
                    PreInputLayer[j].Weights[i] +=
                        learningRate * InputLayer[i].Error * PreInputLayer[j].Value;
                }
            }

            // Update the second weight matrix (input -> hidden).
            for (i = 0; i < HiddenNum; i++)
            {
                for (j = 0; j < InputNum; j++)
                {
                    InputLayer[j].Weights[i] +=
                        learningRate * HiddenLayer[i].Error * InputLayer[j].Output;
                }
            }

            // Update the third weight matrix (hidden -> output).
            for (i = 0; i < OutputNum; i++)
            {
                for (j = 0; j < HiddenNum; j++)
                {
                    HiddenLayer[j].Weights[i] +=
                        learningRate * OutputLayer[i].Error * HiddenLayer[j].Output;
                }
            }
        }

        /// <summary>
        /// Sigmoid activation function; maps any real x into (0, 1).
        /// </summary>
        public double F(double x)
        {
            return (1 / (1 + Math.Exp(-x)));
        }

        /// <summary>
        /// Feeds one training pattern through the network and computes, for each
        /// output neuron, its activation, its target (1.0 when the neuron's Value
        /// equals <paramref name="output"/>, otherwise 0.0) and its error term
        /// (target - out) * out * (1 - out), the sigmoid derivative delta.
        /// </summary>
        /// <param name="pattern">Raw input values; length must be PreInputNum.</param>
        /// <param name="output">The class label this pattern should map to.</param>
        public void ForwardPropagate(double[] pattern, T output)
        {
            int i, j;
            double total;

            // Apply the pattern to the pre-input layer.
            for (i = 0; i < PreInputNum; i++)
            {
                PreInputLayer[i].Value = pattern[i];
            }

            // First (input) layer: weighted sum of pre-input values, then sigmoid.
            for (i = 0; i < InputNum; i++)
            {
                total = 0.0;
                for (j = 0; j < PreInputNum; j++)
                {
                    total += PreInputLayer[j].Value * PreInputLayer[j].Weights[i];
                }
                InputLayer[i].InputSum = total;
                InputLayer[i].Output = F(total);
            }

            // Second (hidden) layer.
            for (i = 0; i < HiddenNum; i++)
            {
                total = 0.0;
                for (j = 0; j < InputNum; j++)
                {
                    total += InputLayer[j].Output * InputLayer[j].Weights[i];
                }

                HiddenLayer[i].InputSum = total;
                HiddenLayer[i].Output = F(total);
            }

            // Third (output) layer: activations, targets and error deltas.
            for (i = 0; i < OutputNum; i++)
            {
                total = 0.0;
                for (j = 0; j < HiddenNum; j++)
                {
                    total += HiddenLayer[j].Output * HiddenLayer[j].Weights[i];
                }

                OutputLayer[i].InputSum = total;
                OutputLayer[i].output = F(total);
                OutputLayer[i].Target = OutputLayer[i].Value.CompareTo(output) == 0 ? 1.0 : 0.0;
                OutputLayer[i].Error = (OutputLayer[i].Target - OutputLayer[i].output) * (OutputLayer[i].output) * (1 - OutputLayer[i].output);
            }
        }

        /// <summary>
        /// Returns the sum-of-squares error, Sum((target - out)^2) / 2, for the
        /// pattern most recently passed to ForwardPropagate.
        /// </summary>
        public double GetError()
        {
            double total = 0.0;
            for (int j = 0; j < OutputNum; j++)
            {
                total += Math.Pow((OutputLayer[j].Target - OutputLayer[j].output), 2) / 2;
            }
            return total;
        }

        /// <summary>
        /// Creates all weight matrices with small random initial values in
        /// [0.01, 0.09) and assigns one training-set key (class label) to each
        /// output neuron. Must be called before training.
        /// </summary>
        /// <param name="TrainingSet">Map of class label to pattern; its key order
        /// determines which output neuron represents which label.</param>
        public void InitializeNetwork(Dictionary<T, double[]> TrainingSet)
        {
            int i, j;
            Random rand = new Random();
            for (i = 0; i < PreInputNum; i++)
            {
                PreInputLayer[i].Weights = new double[InputNum];
                for (j = 0; j < InputNum; j++)
                {
                    PreInputLayer[i].Weights[j] = 0.01 + ((double)rand.Next(0, 8) / 100);
                }
            }

            for (i = 0; i < InputNum; i++)
            {
                InputLayer[i].Weights = new double[HiddenNum];
                for (j = 0; j < HiddenNum; j++)
                {
                    InputLayer[i].Weights[j] = 0.01 + ((double)rand.Next(0, 8) / 100);
                }
            }

            for (i = 0; i < HiddenNum; i++)
            {
                HiddenLayer[i].Weights = new double[OutputNum];
                for (j = 0; j < OutputNum; j++)
                {
                    HiddenLayer[i].Weights[j] = 0.01 + ((double)rand.Next(0, 8) / 100);
                }
            }

            int k = 0;
            foreach (KeyValuePair<T, double[]> p in TrainingSet)
            {
                OutputLayer[k++].Value = p.Key;
            }
        }

        /// <summary>
        /// Feeds an input pattern through the trained network and reports the two
        /// output neurons with the highest activations.
        /// </summary>
        /// <param name="Input">Raw input values; length must be PreInputNum.</param>
        /// <param name="MatchedHigh">Receives the label of the best-matching neuron.</param>
        /// <param name="OutputValueHight">Receives the best activation value.</param>
        /// <param name="MatchedLow">Receives the label of the second-best neuron.</param>
        /// <param name="OutputValueLow">Receives the second-best activation value.</param>
        public void Recognize(double[] Input, ref T MatchedHigh, ref double OutputValueHight, ref T MatchedLow, ref double OutputValueLow)
        {
            int i, j;
            double total = 0.0;
            double max = -1;
            double secondMax = -1;  // sigmoid outputs are in (0,1), so -1 is a safe sentinel

            // Apply the pattern to the pre-input layer.
            for (i = 0; i < PreInputNum; i++)
            {
                PreInputLayer[i].Value = Input[i];
            }

            // First (input) layer.
            for (i = 0; i < InputNum; i++)
            {
                total = 0.0;
                for (j = 0; j < PreInputNum; j++)
                {
                    total += PreInputLayer[j].Value * PreInputLayer[j].Weights[i];
                }
                InputLayer[i].InputSum = total;
                InputLayer[i].Output = F(total);
            }

            // Second (hidden) layer.
            for (i = 0; i < HiddenNum; i++)
            {
                total = 0.0;
                for (j = 0; j < InputNum; j++)
                {
                    total += InputLayer[j].Output * InputLayer[j].Weights[i];
                }

                HiddenLayer[i].InputSum = total;
                HiddenLayer[i].Output = F(total);
            }

            // Compute output activations and track the two highest.
            // BUGFIX: the original only updated the runner-up when a new maximum
            // was found, so a second-best neuron that fires AFTER the best one
            // was never recorded (OutputValueLow stayed -1). The else-if branch
            // below handles that case.
            for (i = 0; i < OutputNum; i++)
            {
                total = 0.0;
                for (j = 0; j < HiddenNum; j++)
                {
                    total += HiddenLayer[j].Output * HiddenLayer[j].Weights[i];
                }
                OutputLayer[i].InputSum = total;
                OutputLayer[i].output = F(total);
                if (OutputLayer[i].output > max)
                {
                    // Previous best becomes the runner-up.
                    MatchedLow = MatchedHigh;
                    secondMax = max;
                    max = OutputLayer[i].output;
                    MatchedHigh = OutputLayer[i].Value;
                }
                else if (OutputLayer[i].output > secondMax)
                {
                    // Better than the current runner-up but not the best.
                    secondMax = OutputLayer[i].output;
                    MatchedLow = OutputLayer[i].Value;
                }
            }
            OutputValueHight = max;
            OutputValueLow = secondMax;
        }

        #endregion

        /// <summary>
        /// Gets or sets the learning rate used during weight updates.
        /// If training converges too slowly, increase this (see header notes).
        /// </summary>
        public double LearningRate
        {
            get { return learningRate; }
            set { learningRate = value; }
        }
    }
}

By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.

If a file you wish to view isn't highlighted, and is a text file (not binary), please let us know and we'll add colourisation support for it.

License

This article, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)

Share

About the Author

Murat Firat
Software Developer (Senior)
Turkey Turkey
Has a BS degree in CS; works as a software engineer in Istanbul.

| Advertise | Privacy | Mobile
Web03 | 2.8.141022.2 | Last Updated 30 Oct 2007
Article Copyright 2007 by Murat Firat
Everything else Copyright © CodeProject, 1999-2014
Terms of Service
Layout: fixed | fluid