Multiple convolution neural networks approach for online handwriting recognition

9 Apr 2013, CPOL
The research presents a word recognition technique for an online handwriting recognition system that uses multiple component neural networks (MCNN) as the exchangeable parts of the classifier.
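The three pre-trained networks in the downloads below (digits, lower-case letters, capital letters) hint at what "exchangeable parts" means here: several component networks can be evaluated on the same pattern and their outputs pooled. The sketch below is only an illustration of that idea using the ForwardPropagation class from the source file shown further down; how a ConvolutionNetwork is actually deserialized from an .nnt file, the label sets, and the namespace imports are assumptions, and comparing raw activations across networks is a simplification of the article's full decision rule.

using System.Collections.Generic;
using ANN.Perceptron.Network;

// Minimal sketch only: combines several component networks into one classifier.
// How each ConvolutionNetwork is loaded from its .nnt file is not shown in this
// excerpt, so that step is left to the caller.
static class McnnSketch
{
    sealed class Component
    {
        public ConvolutionNetwork Network;  // e.g. the network stored in "digit v2.nnt"
        public char[] Labels;               // class label for each output neuron
    }

    // Runs every component on the same flattened input pattern and returns the
    // label whose output activation is highest across all components.
    static char Classify(IEnumerable<Component> components, double[] inputVector)
    {
        char best = '?';
        double bestScore = double.MinValue;

        foreach (var c in components)
        {
            var forward = new ForwardPropagation(c.Network);
            var outputs = new double[c.Labels.Length];
            forward.Calculate(inputVector, inputVector.Length, outputs, outputs.Length, null);

            for (int i = 0; i < outputs.Length; i++)
            {
                if (outputs[i] > bestScore)
                {
                    bestScore = outputs[i];
                    best = c.Labels[i];
                }
            }
        }
        return best;
    }
}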
capital_letter_v2.zip
capital letter v2.nnt
digit_v2.zip
digit v2.nnt
lower_case_letter_v2.zip
lower case letter v2.nnt
UNIPENviewer_source_code-noexe.zip
UNIPENviewer
(Solution contents: the DocToolkit, DrawTools, NNControl, Perceptron, SpellChecker, UNIPENviewer and UPImage projects, plus spelling dictionaries, image resources and build output folders.)
(Perceptron contains the network code: ActivationFunction, ArchiveSerialization, Common, Connections, Layers, Network, Neurons and Weights; NNControl contains the training and testing controls; UPImage contains Common, Data and FileFormat.)
UNIPENviewer_source_code.zip
(Contents: the same solution as above, with compiled binaries, the UNIPENviewer executable and spelling dictionaries included under bin\Debug and bin\Release.)
Word_samples.zip
Word samples
beach.dtl
Bengal.dtl
best.dtl
Brower.dtl
Buy.dtl
Byte byte.dtl
Change.dtl
Color.dtl
Company.dtl
Complet.dtl
copy.dtl
cup.dtl
draw tool.dtl
Eastern.dtl
Eat.dtl
eldest.dtl
Emergency.dtl
English.dtl
File.dtl
Finist.dtl
found nothing.dtl
France.dtl
Future.dtl
help me now.dtl
Hey hello.dtl
Hobby.dtl
How are you.dtl
Huck.dtl
icon.dtl
inker.dtl
Internet.dtl
Link.dtl
load.dtl
Local.dtl
Lonely.dtl
loosen.dtl
love you.dtl
Madden.dtl
Main gate.dtl
Mercy.dtl
Module.dtl
monday.dtl
Moon.dtl
mouse.dtl
my turn.dtl
net spell.dtl
network.dtl
never.dtl
newest.dtl
noted.dtl
Novel.dtl
oldest.dtl
Option.dtl
Pencil.dtl
petro.dtl
Pink.dtl
quick and slow.dtl
Rock.dtl
save.dtl
Sell.dtl
slam.dtl
smart phone.dtl
Strong.dtl
Strongest.dtl
success.dtl
Summer.dtl
Take.dtl
Text.dtl
Took.dtl
Train.dtl
Tuesday.dtl
Valence.dtl
Victory.dtl
viewer.dtl
vintage.dtl
Volume.dtl
water.dtl
Weak.dtl
Window.dtl
Windy.dtl
word expert.dtl
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.IO;
using ANN.Perceptron.ArchiveSerialization;
using System.Threading;
using System.Threading.Tasks;
using UPImage.Common;
using UPImage.Data;
using ANN.Perceptron.Common;
using ANN.Perceptron.Neurons;
namespace ANN.Perceptron.Network
{
 
    public class ForwardPropagation:NetworkProvider,IDisposable
    {
        protected bool _bDataReady;
        //backpropagation and training-related members
        protected ConvolutionNetwork network;
        protected double[] m_DispH;  // horiz distortion map array
        protected double[] m_DispV;  // vert distortion map array
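        // NOTE: the distortion maps (and the flattened input pattern) are indexed as
        //   [row * inputImageSize.Height + col]
        // with row in [0, inputImageSize.Width) and col in [0, inputImageSize.Height),
        // so each "row" block holds Height consecutive column entries.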
        protected Size inputImageSize;
        protected int inputNeuronCount;
        protected double[,] gaussianKernel = new double[NNDefinations.GAUSSIAN_FIELD_SIZE, NNDefinations.GAUSSIAN_FIELD_SIZE];
        protected ByteImageData[] patternsData;
        private ThreadSafeRandom RandomGenerator = new ThreadSafeRandom();
     //   public double EtaLearningRatePrevious;
        public double EtaLearningRate;
        public HiPerfTimer HPTime;
        public int PatternCount { get; set; }
        public int CurrentPattern { get; set; }
        public NetworkParameters Parameters { get; set; }
        public ByteImageData[] PatternsData
        {
            get
            {
                return patternsData;
            }
            set
            {
                if (patternsData == value)
                    return;
                patternsData = value;
                PatternCount = patternsData.Length;
            }
        }
        public List<Char> Letters;
        //functions
        public ConvolutionNetwork Network
        {
            get
            {
                return network;
            }
            set
            {
                if (network == value)
                    return;
                network = value;
            }
        }
        public ForwardPropagation():base()
        {
            CurrentPattern = 0;
            _bDataReady = false;
            network = null;
        
            HPTime = new HiPerfTimer();
            PatternCount = 0;
            // allocate memory to store the distortion maps
            network = null;
            inputImageSize.Width = 0;
            inputImageSize.Height = 0;
            inputNeuronCount = inputImageSize.Height * inputImageSize.Width;
            m_DispH = new double[inputNeuronCount];
            m_DispV = new double[inputNeuronCount];
            Letters = null;
        }
        public ForwardPropagation(ConvolutionNetwork net):this()
        {
            CurrentPattern = 0;
            _bDataReady = false;
            network = null;
           
            HPTime = new HiPerfTimer();
            PatternCount = 0;
            // allocate memory to store the distortion maps
            network = net;
            inputImageSize = net.InputDesignedPatternSize;
            inputNeuronCount = inputImageSize.Height * inputImageSize.Width;
            patternsData = null;
            m_DispH = new double[inputNeuronCount];
            m_DispV = new double[inputNeuronCount];


        }
        protected void GetGaussianKernel(double _dElasticSigma)
        {
            // create a gaussian kernel, which is constant, for use in generating elastic distortions

            int iiMid = NNDefinations.GAUSSIAN_FIELD_SIZE / 2;  // GAUSSIAN_FIELD_SIZE is strictly odd

            double twoSigmaSquared = 2.0 * (_dElasticSigma) * (_dElasticSigma);
            twoSigmaSquared = 1.0 / twoSigmaSquared;
            double twoPiSigma = 1.0 / (_dElasticSigma * Math.Sqrt(2.0 * Math.PI));  // 1/(sigma*sqrt(2*PI)) normalization

            /*TODO: Check potentially-changing upper bound expression "NNDefinations.GAUSSIAN_FIELD_SIZE" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, NNDefinations.GAUSSIAN_FIELD_SIZE,ParallelOption, col =>
            {
                for (int row = 0; row < NNDefinations.GAUSSIAN_FIELD_SIZE; ++row)
                {
                    gaussianKernel[row, col] = twoPiSigma * (Math.Exp(-(((row - iiMid) * (row - iiMid) + (col - iiMid) * (col - iiMid)) * twoSigmaSquared)));
                }
            });
        }
        public void Calculate(double[] inputVector, int iCount,
                                 double[] outputVector /* =NULL */, int oCount /* =0 */,
                                 NeuronOutputs[] pNeuronOutputs /* =NULL */ )
        {
            var lit = network.Layers.First();
            // the first layer is the input layer: directly set the outputs of all of its neurons
            // to the input vector
            if (network.LayerCount > 1)
            {
                Parallel.For(0,lit.NeuronCount,ParallelOption, i =>
                {
                    var nit = lit.Neurons[i];
                    nit.output = inputVector[i];
                });

            }
            // calculate the outputs of the subsequent layers
            for (int i = 1; i < network.LayerCount; i++)
            {
                network.Layers[i].Calculate();
            }

            // load up output vector with results

            if (outputVector != null)
            {
                lit = network.Layers[network.LayerCount - 1];

                Parallel.For(0, oCount, ParallelOption, ii =>
                {
                    outputVector[ii] = lit.Neurons[ii].output;
                });
            }

            // load up neuron output values with results
            if (pNeuronOutputs != null)
            {
                // The caller must supply an array with one slot per layer (LayerCount
                // entries); allocating a new array here would not be visible to the
                // caller, because the parameter is not passed by reference.

                /*TODO: Check potentially-changing upper bound expression "network.LayerCount" which is now called only *once*,
                to ensure the new Parallel.For call matches behavior in the original for-loop
               (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
                Parallel.For(0, network.LayerCount, ParallelOption, index =>
                {
                    var nnlit = network.Layers[index];
                    var layerOut = new NeuronOutputs();
                    for (int ii = 0; ii < nnlit.NeuronCount; ++ii)
                    {
                        layerOut.Add(nnlit.Neurons[ii].output);
                    }
                    pNeuronOutputs[index] = layerOut;
                });


            }
        }
        /////////////////////////
        /// <summary>
        /// Wrapper for Calculate() that optionally applies a random distortion to the input pattern first.
        /// </summary>
        /// <param name="inputVector">Flattened input pattern</param>
        /// <param name="count">Number of input values</param>
        /// <param name="outputVector">Receives the output-layer activations (may be null)</param>
        /// <param name="oCount">Number of output values</param>
        /// <param name="pNeuronOutputs">Receives the per-layer neuron outputs (may be null)</param>
        /// <param name="bDistort">If true, generate and apply a distortion map before the forward pass</param>
        public void CalculateNeuralNet(double[] inputVector, int count,
                                   double[] outputVector /* =NULL */, int oCount /* =0 */,
                                   NeuronOutputs[] pNeuronOutputs /* =NULL */,
                                   bool bDistort /* =FALSE */ )
        {
            // Wrapper for the neural net's Calculate() function. The original comment refers to a
            // CAutoMutex guard used to restrict access to one thread at a time; no such guard is
            // present in this port, so the call simply forwards to Calculate(), optionally
            // distorting the input pattern first.

            if (bDistort)
            {
                GenerateDistortionMap(1.0);
                ApplyDistortionMap(inputVector);
            }

            Calculate(inputVector, count, outputVector, oCount, pNeuronOutputs);
        }
        /// <summary>
        /// Applies the current distortion map to the input pattern.
        /// </summary>
        /// <param name="inputVector"></param>
        protected void ApplyDistortionMap(double[] inputVector)
        {
            // applies the current distortion map to the input vector

            // For the mapped array, we assume that 0.0 == background, and 1.0 == full intensity information
            // This is different from the input vector, in which +1.0 == background (white), and 
            // -1.0 == information (black), so we must convert one to the other

            double[,] mappedVector = new double[inputImageSize.Width,inputImageSize.Height];
            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Width" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Width, ParallelOption, i =>
            {
                for (int j = 0; j < inputImageSize.Height; j++)
                {
                    mappedVector[i, j] = 0.0;
                }
            });



            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Width" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Width, ParallelOption, row =>
            {
                for (int col = 0; col < inputImageSize.Height; ++col)
                {
                    // the pixel at sourceRow, sourceCol is a "phantom" pixel that doesn't really exist, and
                    // whose value must be manufactured from surrounding real pixels (i.e., since 
                    // sourceRow and sourceCol are floating point, not ints, there's not a real pixel there)
                    // The idea is that if we can calculate the value of this phantom pixel, then its 
                    // displacement will exactly fit into the current pixel at row, col (which are both ints)
                    double sourceRow, sourceCol;
                    double fracRow, fracCol;
                    double w1, w2, w3, w4;
                    double sourceValue;
                    int sRow, sCol, sRowp1, sColp1;
                    bool bSkipOutOfBounds;
                    sourceRow = (double)row - m_DispV[row * inputImageSize.Height + col];
                    sourceCol = (double)col - m_DispH[row * inputImageSize.Height + col];
                    // weights for bi-linear interpolation
                    fracRow = sourceRow - (int)sourceRow;
                    fracCol = sourceCol - (int)sourceCol;
                    w1 = (1.0 - fracRow) * (1.0 - fracCol);
                    w2 = (1.0 - fracRow) * fracCol;
                    w3 = fracRow * (1 - fracCol);
                    w4 = fracRow * fracCol;
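                    // w1..w4 are the bilinear weights of the four neighbouring real pixels
                    // (they sum to 1.0); each one multiplies exactly one neighbour below.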
                    // limit indexes
                    /*
                                            while (sourceRow >= mpatternSize.Width ) sourceRow -= mpatternSize.Width;
                                            while (sourceRow < 0 ) sourceRow += mpatternSize.Width;
            			
                                            while (sourceCol >= mpatternSize.Height ) sourceCol -= mpatternSize.Height;
                                            while (sourceCol < 0 ) sourceCol += mpatternSize.Height;
                                */
                    bSkipOutOfBounds = false;
                    if ((sourceRow + 1.0) >= inputImageSize.Width)
                        bSkipOutOfBounds = true;
                    if (sourceRow < 0)
                        bSkipOutOfBounds = true;
                    if ((sourceCol + 1.0) >= inputImageSize.Height)
                        bSkipOutOfBounds = true;
                    if (sourceCol < 0)
                        bSkipOutOfBounds = true;
                    if (bSkipOutOfBounds == false)
                    {
                        // the supporting pixels for the "phantom" source pixel are all within the 
                        // bounds of the character grid.
                        // Manufacture its value by bi-linear interpolation of surrounding pixels
                        sRow = (int)sourceRow;
                        sCol = (int)sourceCol;
                        sRowp1 = sRow + 1;
                        sColp1 = sCol + 1;
                        while (sRowp1 >= inputImageSize.Width)
                            sRowp1 -= inputImageSize.Width;
                        while (sRowp1 < 0)
                            sRowp1 += inputImageSize.Width;
                        while (sColp1 >= inputImageSize.Height)
                            sColp1 -= inputImageSize.Height;
                        while (sColp1 < 0)
                            sColp1 += inputImageSize.Height;
                        // perform bi-linear interpolation
                        sourceValue = w1 * inputVector[sRow * inputImageSize.Height + sCol] +
                                      w2 * inputVector[sRow * inputImageSize.Height + sColp1] +
                                      w3 * inputVector[sRowp1 * inputImageSize.Height + sCol] +
                                      w4 * inputVector[sRowp1 * inputImageSize.Height + sColp1];
                    }
                    else
                    {
                        // At least one supporting pixel for the "phantom" pixel is outside the
                        // bounds of the character grid. Set its value to "background"
                        sourceValue = 1.0; // "background" color in the -1 -> +1 range of inputVector
                    }
                    mappedVector[row, col] = 0.5 * (1.0 - sourceValue); // conversion to 0->1 range we are using for mappedVector
                }
            });

            // now, invert again while copying back into original vector

            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Width" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Width, ParallelOption, row =>
            {
                for (int col = 0; col < inputImageSize.Height; ++col)
                {
                    inputVector[row * inputImageSize.Height + col] = 1.0 - 2.0 * mappedVector[row, col];
                }
            });

        }
        /// <summary>
        /// Generates the horizontal and vertical distortion maps (elastic distortion, scaling and rotation).
        /// </summary>
        /// <param name="severityFactor">Relative severity of the distortions (1.0 = default)</param>
        protected void GenerateDistortionMap(double severityFactor /* =1.0 */ )
        {
            // generates distortion maps in each of the horizontal and vertical directions
            // Three distortions are applied: a scaling, a rotation, and an elastic distortion
            // Since these are all linear transformations, we can simply add them together
            // after calculating each one in turn.

            // The input parameter, severityFactor, lets us control the severity of the distortions
            // relative to the default values.  For example, if we only want half as harsh a distortion,
            // set severityFactor == 0.5

            // First, elastic distortion, per Patrice Simard, "Best Practices For Convolutional Neural Networks..."
            // at page 2.
            // Three-step process: seed array with uniform randoms, filter with a gaussian kernel, normalize (scale)

            double[] uniformH = new double[inputNeuronCount];
            double[] uniformV = new double[inputNeuronCount];
            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Height" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Height,ParallelOption, col =>
            {
                for (int row = 0; row < inputImageSize.Width; ++row)
                {
                    uniformH[row * inputImageSize.Height + col] = (double)(2.0 * RandomGenerator.NextDouble() - 1.0);
                    uniformV[row * inputImageSize.Height + col] = (double)(2.0 * RandomGenerator.NextDouble() - 1.0);
                }
            });

            // filter with gaussian



            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Height" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Height, ParallelOption, col =>
            {
                for (int row = 0; row < inputImageSize.Width; ++row)
                {
                    double fConvolvedH, fConvolvedV;
                    double fSampleH, fSampleV;
                    double elasticScale = severityFactor * Parameters.ElasticScaling;
                    int xxxDisp, yyyDisp;
                    int iiMid = NNDefinations.GAUSSIAN_FIELD_SIZE / 2; // GAUSSIAN_FIELD_SIZE (21) is strictly odd
                    fConvolvedH = 0.0;
                    fConvolvedV = 0.0;
                    for (int xxx = 0; xxx < NNDefinations.GAUSSIAN_FIELD_SIZE; ++xxx)
                    {
                        for (int yyy = 0; yyy < NNDefinations.GAUSSIAN_FIELD_SIZE; ++yyy)
                        {
                            xxxDisp = col - iiMid + xxx;
                            yyyDisp = row - iiMid + yyy;
                            if (xxxDisp < 0 || xxxDisp >= inputImageSize.Height || yyyDisp < 0 || yyyDisp >= inputImageSize.Width)
                            {
                                fSampleH = 0.0;
                                fSampleV = 0.0;
                            }
                            else
                            {
                                fSampleH = uniformH[yyyDisp * inputImageSize.Height + xxxDisp];
                                fSampleV = uniformV[yyyDisp * inputImageSize.Height + xxxDisp];
                            }
                            fConvolvedH += fSampleH * gaussianKernel[yyy, xxx];
                            fConvolvedV += fSampleV * gaussianKernel[yyy, xxx];
                        }
                    }
                    m_DispH[row * inputImageSize.Height + col] = elasticScale * fConvolvedH;
                    m_DispV[row * inputImageSize.Height + col] = elasticScale * fConvolvedV;
                }
            });
            uniformH = null;
            uniformV = null;

            // next, the scaling of the image by a random scale factor
            // Horizontal and vertical directions are scaled independently

            double dSFHoriz = severityFactor * Parameters.MaxScaling / 100.0 * (2.0 * RandomGenerator.NextDouble() - 1.0);  // m_dMaxScaling is a percentage
            double dSFVert = severityFactor * Parameters.MaxScaling / 100.0 * (2.0 * RandomGenerator.NextDouble() - 1.0);  // m_dMaxScaling is a percentage


            int iMid = inputImageSize.Width / 2;

            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Width" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Width, ParallelOption, row =>
            {
                for (int col = 0; col < inputImageSize.Height; ++col)
                {
                    m_DispH[row * inputImageSize.Height + col] = m_DispH[row * inputImageSize.Height + col] + dSFHoriz * (col - iMid);
                    m_DispV[row * inputImageSize.Height + col] = m_DispV[row * inputImageSize.Height + col] - dSFVert * (iMid - row); // negative because of top-down bitmap
                }
            });


            // finally, apply a rotation


            /*TODO: Check potentially-changing upper bound expression "inputImageSize.Width" which is now called only *once*,
            to ensure the new Parallel.For call matches behavior in the original for-loop
           (where this upper bound expression had previously been evaluated at the start of *every* loop iteration).*/
            Parallel.For(0, inputImageSize.Width, ParallelOption, row =>
            {
                for (int col = 0; col < inputImageSize.Height; ++col)
                {
                    double angle = severityFactor * Parameters.MaxRotation * (2.0 * RandomGenerator.NextDouble() - 1.0);
                    angle = angle * Math.PI / 180.0; // convert from degrees to radians
                    double cosAngle = Math.Cos(angle);
                    double sinAngle = Math.Sin(angle);
                    m_DispH[row * inputImageSize.Height + col] = m_DispH[row * inputImageSize.Height + col] + (col - iMid) * (cosAngle - 1) - (iMid - row) * sinAngle;
                    m_DispV[row * inputImageSize.Height + col] = m_DispV[row * inputImageSize.Height + col] - (iMid - row) * (cosAngle - 1) + (col - iMid) * sinAngle; // negative because of top-down bitmap
                }
            });

        }
        public void Dispose()
        {
            Dispose(true);
            if (RandomGenerator != null)
                RandomGenerator.Dispose();
            if (network != null)
                network.Dispose();
            GC.SuppressFinalize(this);
        }
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {

            }
        }
        ~ForwardPropagation()
        {
            Dispose(false);
            RandomGenerator = null;
            network=null;
        }
    }
}
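For orientation, here is a minimal usage sketch of the class above. It assumes a ConvolutionNetwork has already been created or deserialized elsewhere in the project (that step is not part of this file), and the namespace import is abbreviated accordingly.

using ANN.Perceptron.Network;

public static class ForwardPassSketch
{
    // Minimal sketch, assuming `net` is built or loaded elsewhere in the project.
    public static double[] RecognizePattern(ConvolutionNetwork net, double[] inputVector, int outputCount)
    {
        var outputs = new double[outputCount];

        // Note: ForwardPropagation.Dispose() also disposes the wrapped network,
        // so the caller keeps ownership here and no using-block is applied.
        var forward = new ForwardPropagation(net);

        // bDistort is left false: the random-distortion path additionally requires
        // Parameters and the Gaussian kernel to be prepared by the training code,
        // which is outside this excerpt.
        forward.CalculateNeuralNet(inputVector, inputVector.Length,
                                   outputs, outputs.Length,
                                   null /* per-layer outputs not needed */,
                                   false /* bDistort */);

        // The index of the highest activation identifies the recognized class;
        // mapping it back to a character (e.g. via the Letters list) happens elsewhere.
        return outputs;
    }
}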

License

This article, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)

About the Author

Vietdungiitb
Vietnam Maritime University
Vietnam
No Biography provided

Article Copyright 2013 by Vietdungiitb