/*
 * Encog(tm) Core v3.4 - Java Version
 * http://www.heatonresearch.com/encog/
 * https://github.com/encog/encog-java-core
 *
 * Copyright 2008-2017 Heaton Research, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * For more information on Heaton Research copyrights, licenses
 * and trademarks visit:
 * http://www.heatonresearch.com/copyright
 */
package org.encog;

import org.encog.engine.network.activation.ActivationLinear;
import org.encog.engine.network.activation.ActivationReLU;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.importance.PerturbationFeatureImportanceCalc;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
import org.encog.neural.networks.training.propagation.sgd.StochasticGradientDescent;
import org.encog.neural.networks.training.propagation.sgd.update.AdaGradUpdate;
import org.encog.neural.networks.training.propagation.sgd.update.NesterovUpdate;
import org.encog.neural.networks.training.propagation.sgd.update.RMSPropUpdate;
import org.encog.neural.pattern.ElmanPattern;

/**
 * XOR: This example is essentially the "Hello World" of neural network
 * programming. This example shows how to construct an Encog neural
 * network to predict the output from the XOR operator. This example
 * uses resilient propagation (RPROP) to train the neural network.
 *
 * This example attempts to use a minimum of Encog features to create and
 * train the neural network. This allows you to see exactly what is going
 * on. For a more advanced example, that uses Encog factories, refer to
 * the XORFactory example.
 */
4325public class Test {
4426
27+ /**
28+ * The input necessary for XOR.
29+ */
30+ public static double XOR_INPUT [][] = { { 0.0 , 0.0 }, { 1.0 , 0.0 },
31+ { 0.0 , 1.0 }, { 1.0 , 1.0 } };
32+
33+ /**
34+ * The ideal data necessary for XOR.
35+ */
36+ public static double XOR_IDEAL [][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
37+
4538 /**
4639 * The main method.
4740 * @param args No arguments are used.
4841 */
4942 public static void main (final String args []) {
5043
51- ElmanPattern elmanPat = new ElmanPattern ();
52- elmanPat .setInputNeurons (5 );
53- elmanPat .addHiddenLayer (5 );
54- elmanPat .setOutputNeurons (1 );
55- BasicNetwork network = (BasicNetwork ) elmanPat .generate ();
56- System .out .println (network .toString ());
44+ // create a neural network, without using a factory
45+ BasicNetwork network = new BasicNetwork ();
46+ network .addLayer (new BasicLayer (new ActivationReLU (),true ,2 ));
47+ network .addLayer (new BasicLayer (new ActivationSigmoid (),true ,3 ));
48+ network .addLayer (new BasicLayer (new ActivationLinear (),false ,1 ));
49+ network .getStructure ().finalizeStructure ();
50+ network .reset ();
51+
52+ // create training data
53+ MLDataSet trainingSet = new BasicMLDataSet (XOR_INPUT , XOR_IDEAL );
54+
55+ // train the neural network
56+ final ResilientPropagation train = new ResilientPropagation (network , trainingSet );
57+
58+ int epoch = 1 ;
59+
60+ do {
61+ train .iteration ();
62+ System .out .println ("Epoch #" + epoch + " Error:" + train .getError ());
63+ epoch ++;
64+ } while (train .getError () > 0.01 );
65+ train .finishTraining ();
66+
67+ // test the neural network
68+ System .out .println ("Neural Network Results:" );
69+ for (MLDataPair pair : trainingSet ) {
70+ final MLData output = network .compute (pair .getInput ());
71+ System .out .println (pair .getInput ().getData (0 ) + "," + pair .getInput ().getData (1 )
72+ + ", actual=" + output .getData (0 ) + ",ideal=" + pair .getIdeal ().getData (0 ));
73+ }
74+
75+ Encog .getInstance ().shutdown ();
5776 }
58- }
77+ }