Click here to Skip to main content
Click here to Skip to main content
Add your own
alternative version

Benchmarking and Comparing Encog, Neuroph and JOONE Neural Networks

, 3 Jun 2010 LGPL3
I compare the performance of Encog, Neuroph and JOONE
BenchmarkNN.zip
benchmark
encog-core-2.4.0.jar
joone-engine.jar
neuroph.jar
slf4j-api-1.5.6.jar
slf4j-jdk14-1.5.6.jar
import java.util.concurrent.CountDownLatch;

import org.joone.engine.FullSynapse;
import org.joone.engine.LinearLayer;
import org.joone.engine.Monitor;
import org.joone.engine.NeuralNetEvent;
import org.joone.engine.NeuralNetListener;
import org.joone.engine.SigmoidLayer;
import org.joone.engine.learning.TeachingSynapse;
import org.joone.io.MemoryInputSynapse;
import org.joone.net.NeuralNet;

/**
 * JOONE implementation of the benchmark harness.
 *
 * <p>{@link #prepareBenchmark(double[][], double[][])} builds a three-layer
 * feed-forward network (linear input, sigmoid hidden, sigmoid output) wired
 * to in-memory training data, and {@link #benchmark(int)} runs the requested
 * number of training cycles, blocking until JOONE reports completion via the
 * {@link NeuralNetListener#netStopped} callback.
 *
 * <p>Not thread-safe: callers must invoke {@code prepareBenchmark} before
 * {@code benchmark}, from a single thread.
 */
public class BenchmarkJOONE implements Benchmarkable, NeuralNetListener {

	private NeuralNet nnet;
	// Re-created for every benchmark() run so the harness can be reused;
	// released by netStopped() when JOONE reports training completion.
	private CountDownLatch latch = new CountDownLatch(1);
	private Monitor monitor;
	private final boolean singleThread;

	/**
	 * @param singleThread true to force JOONE into single-thread mode,
	 *                     false to allow its multi-threaded engine
	 */
	public BenchmarkJOONE(boolean singleThread)
	{
		this.singleThread = singleThread;
	}


	@Override
	public void cicleTerminated(NeuralNetEvent arg0) {
		// No per-cycle work needed for this benchmark.
	}

	@Override
	public void errorChanged(NeuralNetEvent arg0) {
		// Training error is not tracked by this benchmark.
	}

	@Override
	public void netStarted(NeuralNetEvent arg0) {
		// No setup needed at training start.
	}

	@Override
	public void netStopped(NeuralNetEvent arg0) {
		// Training finished: unblock the thread waiting in benchmark().
		this.latch.countDown();
	}

	@Override
	public void netStoppedError(NeuralNetEvent arg0, String arg1) {
		// Also unblock on error so benchmark() cannot hang forever.
		this.latch.countDown();
	}


	/**
	 * Trains the prepared network for {@code iterations} cycles and blocks
	 * until JOONE signals completion.
	 *
	 * @param iterations number of training cycles to run
	 * @return always 0 (this implementation does not report an error metric)
	 */
	@Override
	public double benchmark(int iterations) {
		// Fresh latch so the harness can be run more than once; the old
		// single-shot latch made any second call return without waiting.
		this.latch = new CountDownLatch(1);
		// BUGFIX: the original ignored the 'iterations' argument and always
		// used Benchmark.ITERATIONS here.
		monitor.setTotCicles(iterations);	// How many times the net must be trained on the input patterns
		try {
			nnet.go(); // The net starts in async mode
			this.latch.await();
		} catch (InterruptedException e) {
			// Restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
		}

		return 0;
	}

	/** Builds a JOONE advanced column selector "1,2,...,n" for n columns. */
	private static String columnSelector(int columns) {
		StringBuilder sb = new StringBuilder();
		for (int i = 1; i <= columns; i++) {
			if (i > 1) {
				sb.append(',');
			}
			sb.append(i);
		}
		return sb.toString();
	}


	/**
	 * Builds the network and attaches the training data.
	 *
	 * @param input training input patterns, one row per pattern; assumed to
	 *              have Benchmark.INPUT_COUNT columns — TODO confirm
	 * @param ideal desired outputs, one row per pattern; assumed to have
	 *              Benchmark.OUTPUT_COUNT columns — TODO confirm
	 */
	@Override
	public void prepareBenchmark(double[][] input, double[][] ideal) {
		// First, create the three layers
        LinearLayer	inputLayer = new LinearLayer();
        SigmoidLayer	hiddenLayer = new SigmoidLayer();
        SigmoidLayer	outputLayer = new SigmoidLayer();

        inputLayer.setLayerName("input");
        hiddenLayer.setLayerName("hidden");
        outputLayer.setLayerName("output");

        // set their dimensions
        inputLayer.setRows(Benchmark.INPUT_COUNT);
        hiddenLayer.setRows(Benchmark.HIDDEN_COUNT);
        outputLayer.setRows(Benchmark.OUTPUT_COUNT);

        // Now create the two synapses
        FullSynapse synapse_IH = new FullSynapse();	/* input -> hidden conn. */
        FullSynapse synapse_HO = new FullSynapse();	/* hidden -> output conn. */

        synapse_IH.setName("IH");
        synapse_HO.setName("HO");

        // Connect the input layer with the hidden layer
        inputLayer.addOutputSynapse(synapse_IH);
        hiddenLayer.addInputSynapse(synapse_IH);

        // Connect the hidden layer with the output layer
        hiddenLayer.addOutputSynapse(synapse_HO);
        outputLayer.addInputSynapse(synapse_HO);

        MemoryInputSynapse  inputStream = new MemoryInputSynapse();

        // Feed the input patterns from memory; the selector is derived from
        // the actual array width (the original hard-coded "1,...,10").
        inputStream.setInputArray(input);
        inputStream.setAdvancedColumnSelector(columnSelector(input[0].length));

        // set the input data
        inputLayer.addInputSynapse(inputStream);

        TeachingSynapse trainer = new TeachingSynapse();

        // The desired responses, also provided from memory
        MemoryInputSynapse samples = new MemoryInputSynapse();

        // BUGFIX: the original reused the 10-column input selector for the
        // ideal array; use the ideal array's own width instead.
        samples.setInputArray(ideal);
        samples.setAdvancedColumnSelector(columnSelector(ideal[0].length));
        trainer.setDesired(samples);

        // Connect the teacher to the last layer of the net
        outputLayer.addOutputSynapse(trainer);

        // Create a new NeuralNet
        this.nnet = new NeuralNet();
        /*
         * All the layers must be inserted in the NeuralNet object
         */
        nnet.addLayer(inputLayer, NeuralNet.INPUT_LAYER);
        nnet.addLayer(hiddenLayer, NeuralNet.HIDDEN_LAYER);
        nnet.addLayer(outputLayer, NeuralNet.OUTPUT_LAYER);
        monitor = nnet.getMonitor();
        monitor.setTrainingPatterns(input.length);	// # of rows (patterns) contained in the input data
        monitor.setLearningRate(Benchmark.LEARNING_RATE);
        monitor.setMomentum(Benchmark.MOMENTUM);
        monitor.setLearning(true);	// The net must be trained
        monitor.setSingleThreadMode(this.singleThread);  // Set to false for multi-thread mode
        /* The application registers itself as monitor's listener so it can receive
          the notifications of termination from the net. */
        monitor.addNeuralNetListener(this);
	}
}

By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.

If a file you wish to view isn't highlighted, and is a text file (not binary), please let us know and we'll add colourisation support for it.

License

This article, along with any associated source code and files, is licensed under The GNU Lesser General Public License (LGPLv3)

Share

About the Author

taheretaheri
Other Rutgers University
United States United States
Hello, I am a student at Rutgers University. I am in computer science and am learning about machine learning and AI.

| Advertise | Privacy | Mobile
Web03 | 2.8.141029.1 | Last Updated 3 Jun 2010
Article Copyright 2010 by taheretaheri
Everything else Copyright © CodeProject, 1999-2014
Terms of Service
Layout: fixed | fluid