|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using ANN.Perceptron.ArchiveSerialization;
using ANN.Perceptron.Common;
using System.Drawing;
using System.Threading;
using System.Threading.Tasks;
using ANN.Perceptron.Connections;
using ANN.Perceptron.Neurons;
using ANN.Perceptron.Weights;
namespace ANN.Perceptron.Layers
{
/// <summary>
/// A fully-connected layer: every neuron connects to every neuron of the
/// previous layer plus one bias input. Weights are not shared — each neuron
/// owns (prevLayer.NeuronCount + 1) distinct weights.
/// </summary>
public class FullConnectedLayer : CommonLayer
{
    /// <summary>
    /// Creates a fully-connected layer descriptor; call <see cref="Initialize"/>
    /// to allocate the neurons, weights and connections.
    /// </summary>
    /// <param name="sLabel">Human-readable label used when naming neurons and weights.</param>
    /// <param name="pPrev">Previous layer this one connects to; may be null (layer stays detached).</param>
    /// <param name="nNeurons">Number of neurons (outputs) in this layer.</param>
    public FullConnectedLayer(string sLabel, CommonLayer pPrev, int nNeurons)
    {
        label = sLabel;
        prevLayer = pPrev;
        weights = null;
        neurons = new Neuron[nNeurons];
        neuronCount = nNeurons;
        // A fully-connected layer is modelled as a single 1 x nNeurons feature map.
        featureMapSize = new Size(1, neuronCount);
        nFeatureMaps = 1;
        weightCount = 0;
        type = LayerTypes.FullyConnected;
        floatingPointWarning = false;
        ParallelOption = new ParallelOptions();
        ParallelOption.TaskScheduler = null;
        _maxDegreeOfParallelism = Environment.ProcessorCount;
        ParallelOption.MaxDegreeOfParallelism = _maxDegreeOfParallelism;
    }

    /// <summary>Resets the floating-point warning flag and (re)builds the layer.</summary>
    public override void Initialize()
    {
        floatingPointWarning = false;
        CreateLayer();
    }

    /// <summary>
    /// Allocates neurons and weights, then wires every neuron to all neurons of
    /// the previous layer plus one bias connection. No-op when there is no
    /// previous layer.
    /// </summary>
    protected override void CreateLayer()
    {
        var rdm = new Random();
        // Discard anything allocated by the constructor or a previous call.
        if (neuronCount > 0 || neurons != null)
        {
            neurons = null;
            neuronCount = 0;
        }
        if (weightCount > 0 || weights != null)
        {
            weights = null;
            weightCount = 0;
        }
        if (prevLayer == null)
        {
            return; // detached layer: nothing to connect to
        }

        neuronCount = nFeatureMaps * featureMapSize.Width * featureMapSize.Height;
        neurons = new Neuron[neuronCount];
        for (int ii = 0; ii < neuronCount; ii++)
        {
            String lb = String.Format("Layer {0}, Neuron {1}", label, ii);
            Neurons[ii] = new Neuron(lb);
        }

        // One weight per connection: prevLayer.NeuronCount forward connections
        // plus one bias connection per neuron.
        weightCount = neuronCount * (prevLayer.NeuronCount + 1);
        weights = new Weight[weightCount];
        for (int ii = 0; ii < weightCount; ii++)
        {
            String lb = String.Format("Layer {0}, Weight {1}", label, ii);
            // Small uniform initialization in (-0.05, 0.05).
            double initWeight = 0.05 * (2.0 * rdm.NextDouble() - 1.0);
            weights[ii] = new Weight(lb, initWeight);
        }

        // Interconnections with the previous layer: fully connected, weights
        // are not shared in this layer.
        int iNumWeight = 0;
        for (int jj = 0; jj < neuronCount; jj++)
        {
            var n = Neurons[jj];
            int connCount = prevLayer.NeuronCount + 1;
            n.ConnectionCount = connCount;
            n.Connections = new Connection[connCount];
            // BUG FIX: the bias must consume its own weight index. Previously
            // iNumWeight was not incremented here, so the bias and the first
            // forward connection of each neuron shared a weight, and the last
            // neuronCount allocated weights were never wired to anything.
            n.AddConnection((uint)NNDefinations.ULONG_MAX, (uint)iNumWeight++, 0); // bias weight
            for (int ii = 0; ii < prevLayer.NeuronCount; ii++)
            {
                n.AddConnection((uint)ii, (uint)iNumWeight++, ii + 1);
            }
        }
    }
}
}
|
By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.
If a file you wish to view isn't highlighted and is a text file (not binary), please
let us know and we'll add colourisation support for it.
This member has not yet provided a Biography. Assume it's interesting and varied, and probably something to do with programming.