- threenetworks.zip
- ThreeNetworks
- .classpath
- .project
- .settings
- bin
- EncogXOR.class
- JOONEXOR.class
- NeurophXOR.class
- jar
- encog-core-2.4.0.jar
- JOCL-0.1.3a-beta.jar
- joone-engine.jar
- neuroph.jar
- slf4j-api-1.5.6.jar
- slf4j-jdk14-1.5.6.jar
- myMlPerceptron.nnet
- src
|
import org.encog.neural.activation.ActivationSigmoid;
import org.encog.neural.activation.ActivationTANH;
import org.encog.neural.data.NeuralData;
import org.encog.neural.data.NeuralDataPair;
import org.encog.neural.data.NeuralDataSet;
import org.encog.neural.data.basic.BasicNeuralDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.logic.FeedforwardLogic;
import org.encog.neural.networks.training.Train;
import org.encog.neural.networks.training.lma.LevenbergMarquardtTraining;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.neural.networks.training.strategy.RequiredImprovementStrategy;
import org.encog.util.logging.Logging;
/**
 * Trains a 2-3-1 sigmoid feedforward network on the XOR truth table using
 * Encog's Levenberg-Marquardt trainer, then prints the trained network's
 * actual vs. ideal output for each training pair.
 */
public class EncogXOR {

	/** XOR truth-table inputs (each row is one {a, b} pair). */
	public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 },
			{ 0.0, 1.0 }, { 1.0, 1.0 } };

	/** Expected XOR output for each corresponding input row. */
	public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

	/** Training stops once the error drops below this threshold. */
	private static final double TARGET_ERROR = 0.01;

	/**
	 * Safety cap on training iterations. BUGFIX: the original loop was
	 * unbounded and could spin forever if training stalled above the
	 * target error; converging runs are unaffected by this cap.
	 */
	private static final int MAX_EPOCHS = 5000;

	public static void main(final String args[]) {
		Logging.stopConsoleLogging();

		// Build a 2-input / 3-hidden / 1-output feedforward network,
		// sigmoid activation and bias on every layer.
		BasicNetwork network = new BasicNetwork();
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
		network.setLogic(new FeedforwardLogic());
		network.getStructure().finalizeStructure();
		network.reset(); // randomize the initial weights

		NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);

		// Train with Levenberg-Marquardt; the strategy resets the network
		// if improvement is less than 1% over 5 cycles.
		final Train train = new LevenbergMarquardtTraining(network, trainingSet);
		train.addStrategy(new RequiredImprovementStrategy(5));

		int epoch = 1;
		do {
			train.iteration();
			System.out
					.println("Epoch #" + epoch + " Error:" + train.getError());
			epoch++;
		} while (train.getError() > TARGET_ERROR && epoch <= MAX_EPOCHS);

		// Test: feed every training input back through the trained network.
		System.out.println("Neural Network Results:");
		for (NeuralDataPair pair : trainingSet) {
			final NeuralData output = network.compute(pair.getInput());
			System.out.println(pair.getInput().getData(0) + ","
					+ pair.getInput().getData(1)
					+ ", actual=" + output.getData(0)
					+ ",ideal=" + pair.getIdeal().getData(0));
		}
	}
}
|
By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.
If a file you wish to view isn't highlighted, and is a text file (not binary), please
let us know and we'll add colourisation support for it.
Hello, I am a computer science student at Rutgers University, currently learning about machine learning and AI.