Click here to Skip to main content
Click here to Skip to main content
Add your own
alternative version

Multiple convolution neural networks approach for online handwriting recognition

, 9 Apr 2013
The research focuses on the presentation of word recognition technique for an online handwriting recognition system which uses multiple component neural networks (MCNN) as the exchangeable parts of the classifier.
capital_letter_v2.zip
digit_v2.zip
lower_case_letter_v2.zip
UNIPENviewer_source_code-noexe.zip
UNIPENviewer
DocToolkit
DocToolkit.csproj.user
DocToolkit.snk
obj
Debug
Release
DrawTools
about.bmp
App.ico
bin
Debug
Release
DrawTools.csproj.user
ellipse.bmp
Ellipse.cur
line.bmp
Line.cur
new.bmp
obj
Debug
DrawTools.DrawArea.resources
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
Release
DrawTools.DrawArea.resources
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
open.bmp
pencil.bmp
Pencil.cur
pointer.bmp
PolyHandle.cur
rectangle.bmp
Rectangle.cur
save.bmp
NNControl
bin
Debug
Common
NNTesting
NNTraining
obj
Debug
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
NNControl.Common.UPTemplateControl.resources
NNControl.FlashForm.resources
NNControl.NNTesting.NNTestingControl.resources
NNControl.NNTesting.TextSpellControl.resources
NNControl.NNTraining.ConvolutionForm.resources
NNControl.NNTraining.CreateNetworkForm.resources
NNControl.NNTraining.FullConnectedForm.resources
NNControl.NNTraining.InputLayerForm.resources
NNControl.NNTraining.OutputLayerForm.resources
NNControl.NNTraining.UP_NNTrainingControl.resources
NNControl.Properties.Resources.resources
NNControl.TrainingParametersForm.resources
NNControl.UPViewer.UpImageViewer.resources
UPControl.Common.BaseControl.resources
UPControl.Common.UPTemplateControl.resources
UPControl.FlashForm.resources
UPControl.NNTraining.UP_NNTrainingControl.resources
UPControl.TrainingParametersForm.resources
UPControl.UPViewer.UpImageViewer.resources
UP_NeuralTraining.FlashForm.resources
UP_NeuralTraining.TrainingParametersForm.resources
UP_NeuralTraining.UP_NNTrainingControl.resources
Release
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
NNControl.Common.UPTemplateControl.resources
NNControl.FlashForm.resources
NNControl.NNTesting.NNTestingControl.resources
NNControl.NNTraining.ConvolutionForm.resources
NNControl.NNTraining.CreateNetworkForm.resources
NNControl.NNTraining.FullConnectedForm.resources
NNControl.NNTraining.InputLayerForm.resources
NNControl.NNTraining.OutputLayerForm.resources
NNControl.NNTraining.UP_NNTrainingControl.resources
NNControl.Properties.Resources.resources
NNControl.TrainingParametersForm.resources
NNControl.UPViewer.UpImageViewer.resources
Properties
Resources
btnBack.png
btnDrag.png
btnFitToScreen.png
btnNext.png
btnOpen.png
btnPreview.png
btnRotate270.png
btnRotate90.png
btnSelect.png
btnZoomIn.png
btnZoomOut.png
circle.png
clear.png
color_line.png
cry.png
document-new.png
Drag.cur
draw_line.png
ellipse.png
export.png
file.png
fingerprint-recognition.png
folder-open.png
folder.png
folders_explorer.png
Grab.cur
home.png
label-link.png
pointer.png
rectangle.png
save_accept.png
script_(stop).gif
smile.png
stock_draw-line.png
Stop sign.png
Upload.png
user-group-new.png
UPViewer
Perceptron
ActivationFunction
ArchiveSerialization
bin
Debug
Common
Connections
Layers
Network
Neurons
obj
Debug
ANN.Perceptron.Common.BaseControl.resources
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
Neurons.BaseControl.resources
Release
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
Neurons.BaseControl.resources
Properties
Weights
SpellChecker
bin
Debug
Controls
Dictionary
Affix
Dictionary.bmp
Phonetic
doc
NetSpell.ndoc
Forms
Interactive.bmp
obj
Debug
ResGen.read.1.tlog
ResGen.write.1.tlog
SpellChecker.Dictionary.WordDictionary.resources
SpellChecker.Forms.OptionForm.resources
SpellChecker.MultipleSpelling.resources
SpellChecker.NewSpelling.resources
Spell.snk
Spelling.bmp
UNIPENviewer.suo
UNIPENviewer.v11.suo
UNIPENviewer
bin
Debug
Config
de-DE.dic
en-US.dic
fr-FR.dic
it-IT.dic
UNIPENviewer.vshost.exe.manifest
Release
Config
obj
Debug
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
UNIPENviewer.MainForm.resources
UNIPENviewer.Properties.Resources.resources
Release
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
UNIPENviewer.MainForm.resources
UNIPENviewer.Properties.Resources.resources
x86
Debug
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
UNIPENviewer.Form1.resources
UNIPENviewer.Properties.Resources.resources
Properties
Settings.settings
UPImage
Common
Data
FileFormat
obj
Debug
Release
Properties
UNIPENviewer_source_code.zip
bin
Debug
DocToolkit.dll
DocToolkit.pdb
Release
DocToolkit.dll
DocToolkit.csproj.user
DocToolkit.snk
DesignTimeResolveAssemblyReferencesInput.cache
DocToolkit.dll
DocToolkit.pdb
TempPE
DesignTimeResolveAssemblyReferencesInput.cache
DocToolkit.dll
about.bmp
App.ico
DocToolkit.dll
DocToolkit.pdb
DrawTools.dll
DrawTools.pdb
DocToolkit.dll
DrawTools.dll
DrawTools.csproj.user
ellipse.bmp
Ellipse.cur
line.bmp
Line.cur
new.bmp
DesignTimeResolveAssemblyReferencesInput.cache
DrawTools.csproj.GenerateResource.Cache
DrawTools.csprojResolveAssemblyReference.cache
DrawTools.dll
DrawTools.DrawArea.resources
DrawTools.pdb
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
ResolveAssemblyReference.cache
TempPE
DesignTimeResolveAssemblyReferencesInput.cache
DrawTools.dll
DrawTools.DrawArea.resources
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
ResolveAssemblyReference.cache
open.bmp
pencil.bmp
Pencil.cur
pointer.bmp
PolyHandle.cur
rectangle.bmp
Rectangle.cur
Resources
save.bmp
DocToolkit.dll
DocToolkit.pdb
DrawTools-LinhLam.dll
DrawTools-LinhLam.pdb
DrawTools.dll
DrawTools.pdb
NNControl-LinhLam.dll
NNControl-LinhLam.pdb
NNControl.dll
NNControl.pdb
Perceptron.dll
Perceptron.pdb
SpellChecker.dll
SpellChecker.pdb
UP-NeuralTraining.dll
UP-NeuralTraining.pdb
UPImage-LinhLam.dll
UpImage.dll
UpImage.pdb
Release
DocToolkit.dll
DrawTools.dll
Neurons.dll
Neurons.pdb
NNControl.dll
NNControl.pdb
UPImage.dll
UPImage.pdb
DesignTimeResolveAssemblyReferences.cache
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
NNControl-LinhLam.dll
NNControl-LinhLam.pdb
NNControl.Common.UPTemplateControl.resources
NNControl.csproj.GenerateResource.Cache
NNControl.csprojResolveAssemblyReference.cache
NNControl.dll
NNControl.FlashForm.resources
NNControl.NNTesting.NNTestingControl.resources
NNControl.NNTesting.TextSpellControl.resources
NNControl.NNTraining.ConvolutionForm.resources
NNControl.NNTraining.CreateNetworkForm.resources
NNControl.NNTraining.FullConnectedForm.resources
NNControl.NNTraining.InputLayerForm.resources
NNControl.NNTraining.OutputLayerForm.resources
NNControl.NNTraining.UP_NNTrainingControl.resources
NNControl.pdb
NNControl.Properties.Resources.resources
NNControl.TrainingParametersForm.resources
NNControl.UPViewer.UpImageViewer.resources
ResolveAssemblyReference.cache
TempPE
Properties.Resources.Designer.cs.dll
UP-NeuralTraining.dll
UP-NeuralTraining.pdb
UPControl.Common.BaseControl.resources
UPControl.Common.UPTemplateControl.resources
UPControl.FlashForm.resources
UPControl.NNTraining.UP_NNTrainingControl.resources
UPControl.TrainingParametersForm.resources
UPControl.UPViewer.UpImageViewer.resources
UP_NeuralTraining.FlashForm.resources
UP_NeuralTraining.TrainingParametersForm.resources
UP_NeuralTraining.UP_NNTrainingControl.resources
DesignTimeResolveAssemblyReferences.cache
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
NNControl.Common.UPTemplateControl.resources
NNControl.dll
NNControl.FlashForm.resources
NNControl.NNTesting.NNTestingControl.resources
NNControl.NNTraining.ConvolutionForm.resources
NNControl.NNTraining.CreateNetworkForm.resources
NNControl.NNTraining.FullConnectedForm.resources
NNControl.NNTraining.InputLayerForm.resources
NNControl.NNTraining.OutputLayerForm.resources
NNControl.NNTraining.UP_NNTrainingControl.resources
NNControl.pdb
NNControl.Properties.Resources.resources
NNControl.TrainingParametersForm.resources
NNControl.UPViewer.UpImageViewer.resources
ResolveAssemblyReference.cache
TempPE
Properties.Resources.Designer.cs.dll
btnBack.png
btnDrag.png
btnFitToScreen.png
btnNext.png
btnOpen.png
btnPreview.png
btnRotate270.png
btnRotate90.png
btnSelect.png
btnZoomIn.png
btnZoomOut.png
circle.png
clear.png
color_line.png
cry.png
document-new.png
Drag.cur
draw_line.png
ellipse.png
export.png
file.png
fingerprint-recognition.png
folder-open.png
folder.png
folders_explorer.png
Grab.cur
home.png
label-link.png
pointer.png
rectangle.png
save_accept.png
script_(stop).gif
smile.png
stock_draw-line.png
Stop sign.png
Upload.png
user-group-new.png
AForgeLibrary.dll
AForgeLibrary.pdb
NeuralNetworkLibrary.dll
NeuralNetworkLibrary.pdb
Perceptron.dll
Perceptron.pdb
SpellChecker.dll
SpellChecker.pdb
UpImage.dll
UpImage.pdb
Release
NeuralNetworkLibrary.dll
NeuralNetworkLibrary.pdb
Neurons.dll
Neurons.pdb
UPImage.dll
UPImage.pdb
ANN.Perceptron.Common.BaseControl.resources
ANN.Perceptron.csproj.GenerateResource.Cache
ANN.Perceptron.csprojResolveAssemblyReference.cache
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
NeuralNetworkLibrary.dll
NeuralNetworkLibrary.pdb
Neurons.BaseControl.resources
Perceptron.dll
Perceptron.pdb
ResolveAssemblyReference.cache
TempPE
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
NeuralNetworkLibrary.dll
NeuralNetworkLibrary.pdb
Neurons.BaseControl.resources
Neurons.dll
Neurons.pdb
ResolveAssemblyReference.cache
Service References
SpellChecker.dll
SpellChecker.pdb
Release
Dictionary.bmp
NetSpell.ndoc
Interactive.bmp
DesignTimeResolveAssemblyReferences.cache
DesignTimeResolveAssemblyReferencesInput.cache
NetSpell.SpellChecker.dll
NetSpell.SpellChecker.pdb
ResGen.read.1.tlog
ResGen.write.1.tlog
SpellChecker.csproj.GenerateResource.Cache
SpellChecker.csprojResolveAssemblyReference.cache
SpellChecker.Dictionary.WordDictionary.resources
SpellChecker.dll
SpellChecker.Forms.OptionForm.resources
SpellChecker.MultipleSpelling.resources
SpellChecker.NewSpelling.resources
SpellChecker.pdb
TempPE
Spell.snk
Spelling.bmp
UNIPENviewer.suo
UNIPENviewer.v11.suo
de-DE.dic
DocToolkit.dll
DocToolkit.pdb
DrawTools-LinhLam.dll
DrawTools-LinhLam.pdb
DrawTools.dll
DrawTools.pdb
en-US.dic
fr-FR.dic
it-IT.dic
NNControl-LinhLam.dll
NNControl.dll
NNControl.pdb
Perceptron.dll
Perceptron.pdb
SpellChecker.dll
SpellChecker.pdb
UNIPENviewer-LinhLam.exe
UNIPENviewer-LinhLam.pdb
UNIPENviewer.exe
UNIPENviewer.pdb
UNIPENviewer.vshost.exe
UNIPENviewer.vshost.exe.manifest
UPImage-LinhLam.dll
UPImage-LinhLam.pdb
UpImage.dll
UpImage.pdb
DesignTimeResolveAssemblyReferences.cache
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
ResolveAssemblyReference.cache
TempPE
Properties.Resources.Designer.cs.dll
UNIPENviewer-LinhLam.exe
UNIPENviewer-LinhLam.pdb
UNIPENviewer.csproj.GenerateResource.Cache
UNIPENviewer.csprojResolveAssemblyReference.cache
UNIPENviewer.exe
UNIPENviewer.MainForm.resources
UNIPENviewer.pdb
UNIPENviewer.Properties.Resources.resources
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
ResolveAssemblyReference.cache
UNIPENviewer.exe
UNIPENviewer.MainForm.resources
UNIPENviewer.pdb
UNIPENviewer.Properties.Resources.resources
DesignTimeResolveAssemblyReferences.cache
DesignTimeResolveAssemblyReferencesInput.cache
GenerateResource.read.1.tlog
GenerateResource.write.1.tlog
ResolveAssemblyReference.cache
UNIPENviewer.exe
UNIPENviewer.Form1.resources
UNIPENviewer.pdb
UNIPENviewer.Properties.Resources.resources
Settings.settings
bin
Debug
AForgeLibrary.dll
AForgeLibrary.pdb
UPDatabase.dll
UPDatabase.pdb
UpImage-LinhLam.dll
UpImage-LinhLam.pdb
UpImage.dll
UpImage.pdb
UPUnipenLib.dll
UPUnipenLib.pdb
Release
UPImage.dll
UPImage.pdb
DesignTimeResolveAssemblyReferencesInput.cache
TempPE
UPDatabase.dll
UPDatabase.pdb
UpImage-LinhLam-2.pdb
UpImage-LinhLam.dll
UPImage-LinhLam.pdb
UpImage.dll
UpImage.pdb
UPUnipenLib.dll
UPUnipenLib.pdb
DesignTimeResolveAssemblyReferencesInput.cache
UPImage.dll
UPImage.pdb
Word_samples.zip
Word samples
beach.dtl
Bengal.dtl
best.dtl
Brower.dtl
Buy.dtl
Byte byte.dtl
Change.dtl
Color.dtl
Company.dtl
Complet.dtl
copy.dtl
cup.dtl
draw tool.dtl
Eastern.dtl
Eat.dtl
eldest.dtl
Emergency.dtl
English.dtl
File.dtl
Finist.dtl
found nothing.dtl
France.dtl
Future.dtl
help me now.dtl
Hey hello.dtl
Hobby.dtl
How are you.dtl
Huck.dtl
icon.dtl
inker.dtl
Internet.dtl
Link.dtl
load.dtl
Local.dtl
Lonely.dtl
loosen.dtl
love you.dtl
Madden.dtl
Main gate.dtl
Mercy.dtl
Module.dtl
monday.dtl
Moon.dtl
mouse.dtl
my turn.dtl
net spell.dtl
network.dtl
never.dtl
newest.dtl
noted.dtl
Novel.dtl
oldest.dtl
Option.dtl
Pencil.dtl
petro.dtl
Pink.dtl
quick and slow.dtl
Rock.dtl
save.dtl
Sell.dtl
slam.dtl
smart phone.dtl
Strong.dtl
Strongest.dtl
success.dtl
Summer.dtl
Take.dtl
Text.dtl
Took.dtl
Train.dtl
Tuesday.dtl
Valence.dtl
Victory.dtl
viewer.dtl
vintage.dtl
Volume.dtl
water.dtl
Weak.dtl
Window.dtl
Windy.dtl
word expert.dtl
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using ANN.Perceptron.ArchiveSerialization;
using ANN.Perceptron.Layers;
using ANN.Perceptron.Neurons;
using System.IO;
using System.Threading.Tasks;
namespace ANN.Perceptron.Network
{
    // Neural Network class
    // Neural Network training class: adds error backpropagation (first derivatives,
    // for weight updates) and second-derivative backpropagation (for the diagonal
    // Hessian used by stochastic Levenberg-Marquardt) on top of the forward pass.
    public class BackPropagation : ForwardPropagation
    {
        #region Properties
        public uint Backprops;  // counter used in connection with weight sanity check
        #endregion

        public BackPropagation()
            : base()
        {
            EtaLearningRate = .001;  // arbitrary, so that brand-new NNs can be serialized with a non-ridiculous number
            Backprops = 0;
            network = null;
        }

        public BackPropagation(ConvolutionNetwork net)
            : base(net)
        {
            EtaLearningRate = .001;  // arbitrary, so that brand-new NNs can be serialized with a non-ridiculous number
            Backprops = 0;
            network = net;
        }

        // Backpropagates the output error through the neural net, adjusting weights.
        //
        // actualOutput:            forward-pass output vector.
        // desiredOutput:           training target vector.
        // count:                   number of outputs (rejected if >= 256 as a sanity limit).
        // pMemorizedNeuronOutputs: optional per-layer memorized outputs from the forward
        //                          pass; when null, layers work without them.
        public void Backpropagate(double[] actualOutput, double[] desiredOutput, int count, NeuronOutputs[] pMemorizedNeuronOutputs)
        {
            // there must be at least two layers in the net
            if (network.LayerCount < 2)
            {
                return;
            }
            if (actualOutput == null || desiredOutput == null || count >= 256)
            {
                return;
            }

            // every 10000 backprops, run a weight sanity check
            // (NOTE(review): Backprops is not incremented here — presumably the caller
            // advances it; confirm against the training loop)
            if ((Backprops % 10000) == 0)
            {
                PeriodicWeightSanityCheck();
            }

            // Proceed from the last layer to the first, iteratively.  The last layer is
            // calculated separately, and first, since it provides the needed derivative
            // (i.e., dErr_wrt_dXnm1) for the previous layers.
            //
            // nomenclature:
            //   Err  is output error of the entire neural net
            //   Xn   is the output vector on the n-th layer
            //   Xnm1 is the output vector of the previous layer
            //   Wn   is the vector of weights of the n-th layer
            //   Yn   is the activation value of the n-th layer, i.e., the weighted sum
            //        of inputs BEFORE the squashing function is applied
            //   F    is the squashing function: Xn = F(Yn)
            //   F'   is the derivative of the squashing function
            //        (for F = tanh, F'(Yn) = 1 - Xn^2: the derivative can be calculated
            //        from the output, without knowledge of the input)

            int iSize = network.LayerCount;
            // Hoisted once: the original re-evaluated this bound on every iteration.
            int lastLayerNeuronCount = network.Layers[iSize - 1].NeuronCount;
            var dErr_wrt_dXlast = new double[lastLayerNeuronCount];
            var differentials = new List<double[]>(iSize);

            // Start the process by calculating dErr_wrt_dXn for the last layer.
            // For the standard MSE Err function (i.e., 0.5*sumof( (actual-target)^2 )),
            // this differential is simply the difference between the target and the actual.
            Parallel.For(0, lastLayerNeuronCount, ii =>
            {
                dErr_wrt_dXlast[ii] = actualOutput[ii] - desiredOutput[ii];
            });

            // Reserve memory for the remaining vectors stored in differentials.
            // C# arrays are zero-initialized on allocation, so the original's explicit
            // Parallel.For clearing loop was redundant and has been removed.
            for (int ii = 0; ii < iSize - 1; ii++)
            {
                differentials.Add(new double[network.Layers[ii].NeuronCount]);
            }
            differentials.Add(dErr_wrt_dXlast);  // last one

            // Now iterate through all layers including the last but excluding the first,
            // and ask each to backpropagate error and adjust its weights, returning the
            // differential dErr_wrt_dXnm1 for use as dErr_wrt_dXn of the next iterated layer.
            bool bMemorized = (pMemorizedNeuronOutputs != null);
            for (int jj = iSize - 1; jj > 0; jj--)
            {
                if (bMemorized)
                {
                    network.Layers[jj].Backpropagate(differentials[jj], differentials[jj - 1],
                        pMemorizedNeuronOutputs[jj], pMemorizedNeuronOutputs[jj - 1], EtaLearningRate);
                }
                else
                {
                    network.Layers[jj].Backpropagate(differentials[jj], differentials[jj - 1],
                        null, null, EtaLearningRate);
                }
            }

            differentials.Clear();
        }

        // Asks every layer to discard its accumulated diagonal Hessian information.
        public void EraseHessianInformation()
        {
            foreach (var lit in network.Layers)
            {
                lit.EraseHessianInformation();
            }
        }

        // Controls each layer to divide its current diagonal Hessian info by a common
        // divisor.  A check is also made (per layer) to ensure that each Hessian is
        // strictly zero-positive.
        public void DivideHessianInformationBy(double divisor)
        {
            foreach (var lit in network.Layers)
            {
                lit.DivideHessianInformationBy(divisor);
            }
        }

        // Calculates the second derivatives (for the diagonal Hessian) and
        // backpropagates them through the neural net.
        // (Method name keeps the original "Dervatives" misspelling: it is part of the
        // public interface and renaming it would break existing callers.)
        public void BackpropagateSecondDervatives(double[] actualOutputVector, double[] targetOutputVector, int outputCount)
        {
            // there must be at least two layers in the net
            if (network.LayerCount < 2)
            {
                return;
            }
            if (actualOutputVector == null || targetOutputVector == null || outputCount >= 256)
            {
                return;
            }

            // We use nearly the same nomenclature as in Backpropagate (e.g.,
            // "dErr_wrt_dXnm1") even though everything here is actually second
            // derivatives, since otherwise the ASCII would become too confusing.
            // To emphasize that these are second derivatives, we insert a "2" such as
            // "d2Err_wrt_dXnm1"; we don't insert the second "2" that's conventional
            // for designating second derivatives.

            int iSize = network.LayerCount;
            int neuronCount = network.Layers[iSize - 1].NeuronCount;
            var d2Err_wrt_dXlast = new double[neuronCount];
            var differentials = new List<double[]>(iSize);

            // Start the process by calculating the second derivative d2Err_wrt_dXn for
            // the last layer.  For the standard MSE Err function
            // (i.e., 0.5*sumof( (actual-target)^2 )), this differential is exactly one.
            // neuronCount is hoisted above, resolving the original's TODO about the
            // repeatedly-evaluated loop bound.
            Parallel.For(0, neuronCount, ParallelOption, ii =>
            {
                d2Err_wrt_dXlast[ii] = 1.0;
            });

            // Reserve memory for the remaining vectors stored in differentials.
            // Arrays are zero-initialized on allocation; the original's explicit
            // clearing loop was redundant and has been removed.
            for (int ii = 0; ii < iSize - 1; ii++)
            {
                differentials.Add(new double[network.Layers[ii].NeuronCount]);
            }
            differentials.Add(d2Err_wrt_dXlast);  // last one

            // Now iterate through all layers including the last but excluding the
            // first, starting from the last, and ask each to backpropagate the second
            // derivative and accumulate the diagonal Hessian, also returning the second
            // derivative d2Err_wrt_dXnm1 for use by the next iterated layer (which is
            // the previous layer spatially).
            for (int ii = iSize - 1; ii > 0; ii--)
            {
                network.Layers[ii].BackpropagateSecondDerivatives(differentials[ii], differentials[ii - 1]);
            }

            differentials.Clear();
        }

        // Goes through all weights and tests them against an arbitrary "reasonable"
        // upper limit; each layer displays a warning if its limit is exceeded.
        protected void PeriodicWeightSanityCheck()
        {
            foreach (var lit in network.Layers)
            {
                lit.PeriodicWeightSanityCheck();
            }
        }

        // Serializes (stores or loads) the learning rate, the layer count, and every
        // layer to/from the given archive.
        public override void Serialize(Archive ar)
        {
            if (ar.IsStoring())
            {
                ar.Write(EtaLearningRate);
                ar.Write(network.LayerCount);
                foreach (var lit in network.Layers)
                {
                    lit.Serialize(ar);
                }
            }
            else
            {
                double eta;
                ar.Read(out eta);
                EtaLearningRate = eta;  // two-step storage is needed since m_etaLearningRate is "volatile"

                int nLayers;
                var pLayer = (CommonLayer)null;

                ar.Read(out nLayers);

                // rebuild the layer chain; each layer links back to its predecessor
                network.Layers = new CommonLayer[nLayers];
                for (int ii = 0; ii < nLayers; ii++)
                {
                    pLayer = new CommonLayer("", pLayer);

                    network.Layers[ii] = pLayer;
                    pLayer.Serialize(ar);
                }
            }
        }
    }
}

By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.

If a file you wish to view isn't highlighted, and is a text file (not binary), please let us know and we'll add colourisation support for it.

License

This article, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)

About the Author

Vietdungiitb
Vietnam Maritime University
Vietnam Vietnam
No Biography provided

| Advertise | Privacy | Mobile
Web02 | 2.8.140721.1 | Last Updated 10 Apr 2013
Article Copyright 2013 by Vietdungiitb
Everything else Copyright © CodeProject, 1999-2014
Terms of Service
Layout: fixed | fluid