Hello World

From Encog Machine Learning Framework

Java Version

import org.encog.Encog;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
 
/**
 * XOR: This example is essentially the "Hello World" of neural network
 * programming.  This example shows how to construct an Encog neural
 * network to predict the output of the XOR operator.  This example
 * uses resilient propagation (RPROP), a variant of backpropagation,
 * to train the neural network.
 * 
 * This example attempts to use a minimum of Encog features to create and
 * train the neural network.  This allows you to see exactly what is going
 * on.  For a more advanced example that uses Encog factories, refer to
 * the XORFactory example.
 * 
 */
public class XORHelloWorld {
 
	/**
	 * The input necessary for XOR.
	 */
	public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 },
			{ 0.0, 1.0 }, { 1.0, 1.0 } };
 
	/**
	 * The ideal data necessary for XOR.
	 */
	public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
 
	/**
	 * The main method.
	 * @param args No arguments are used.
	 */
	public static void main(final String args[]) {
 
		// create a neural network, without using a factory
		BasicNetwork network = new BasicNetwork();
		network.addLayer(new BasicLayer(null, true, 2));                     // input layer: 2 neurons, bias, no activation
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer: 3 neurons, bias, sigmoid
		network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer: 1 neuron, sigmoid, no bias
		network.getStructure().finalizeStructure();
		network.reset();                                                     // randomize the weights
 
		// create training data
		MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
 
		// train the neural network
		final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
 
		int epoch = 1;
 
		do {
			train.iteration();
			System.out.println("Epoch #" + epoch + " Error:" + train.getError());
			epoch++;
		} while(train.getError() > 0.01);
		train.finishTraining();
 
		// test the neural network
		System.out.println("Neural Network Results:");
		for(MLDataPair pair: trainingSet ) {
			final MLData output = network.compute(pair.getInput());
			System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
					+ ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
		}
 
		Encog.getInstance().shutdown();
	}
}
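
The Javadoc above points to the XORFactory example for a factory-based version of this program. As a rough idea of what that route looks like, here is a minimal sketch that assumes the Encog 3.x MLMethodFactory API (the class name XORFactorySketch is just for illustration; the real XORFactory example is more complete). It creates the same three-layer network from an architecture string instead of adding the layers by hand:

import org.encog.ml.MLMethod;
import org.encog.ml.factory.MLMethodFactory;
import org.encog.neural.networks.BasicNetwork;
 
public class XORFactorySketch {
 
	public static void main(final String args[]) {
		// Assumed Encog 3.x factory API: build a feedforward network from an
		// architecture string; the "?" placeholders are filled in from the
		// input and output counts passed to create().
		MLMethodFactory methodFactory = new MLMethodFactory();
		MLMethod method = methodFactory.create(
				MLMethodFactory.TYPE_FEEDFORWARD,   // feedforward network
				"?:B->SIGMOID->3:B->SIGMOID->?",    // bias, sigmoid hidden and output
				2, 1);                              // 2 inputs, 1 output
		BasicNetwork network = (BasicNetwork) method;
		System.out.println("Layers: " + network.getLayerCount());
	}
}

Training and evaluation then proceed exactly as in the listing above; only the construction of the network changes.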


C# Version

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;
using Encog.Engine.Network.Activation;
using Encog.ML.Data;
using Encog.Neural.Networks.Training.Propagation.Resilient;
using Encog.ML.Train;
using Encog.ML.Data.Basic;
using Encog;
 
namespace EncogExample
{
    public class Program
    {
 
        /// <summary>
        /// Input for the XOR function.
        /// </summary>
        public static double[][] XORInput = {
            new[] {0.0, 0.0},
            new[] {1.0, 0.0},
            new[] {0.0, 1.0},
            new[] {1.0, 1.0}
            };
 
        /// <summary>
        /// Ideal output for the XOR function.
        /// </summary>
        public static double[][] XORIdeal = {
            new[] {0.0},
            new[] {1.0},
            new[] {1.0},
            new[] {0.0}
        };
 
        static void Main(string[] args)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();
 
            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
 
            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);
 
            int epoch = 1;
 
            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);
 
            train.FinishTraining();
 
            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
 
            EncogFramework.Instance.Shutdown();
        }
    }
}

C/C++ Version

#include "encog.h"
 
int main(int argc, char* argv[])
{
	char line[MAX_STR];
    int i;
    REAL *input,*ideal;
    REAL output[1];
    float error;
    ENCOG_DATA *data;
    ENCOG_NEURAL_NETWORK *net;
	ENCOG_OBJECT *trainer;
	ENCOG_TRAINING_REPORT *report;
 
	EncogInit();
 
/* Load the data for XOR */
    data = EncogDataCreate(2, 1, 4);
	EncogErrorCheck();
    EncogDataAdd(data,"0,0,  0");
    EncogDataAdd(data,"1,0,  1");
    EncogDataAdd(data,"0,1,  1");
    EncogDataAdd(data,"1,1,  0");
 
/* Create a 3 layer neural network, with sigmoid transfer functions and bias */
 
    net = EncogNetworkFactory("basic", "2:B->SIGMOID->2:B->SIGMOID->1", 0,0);
	EncogErrorCheck();
 
	//EncogHashPut(encogContext.config,PARAM_TRAIN,"RPROP");
	trainer = EncogTrainNew(net,data);
	EncogErrorCheck();
 
	report = EncogTrainReport(trainer);
	EncogErrorCheck();
 
/* Begin training, report progress. */
	report->maxError = 0.01f;
	report->maxIterations = 500;
	report->updateSeconds = 1;
 
	EncogTrainRun(trainer,net);
 
 
	/* Obtain the SSE error, display it */
    error = EncogErrorSSE(net, data);
    *line = 0;
    EncogStrCatStr(line,"Error: ",MAX_STR);
    EncogStrCatDouble(line,(double)error,4,MAX_STR);
    puts(line);
 
	/* Display the results from the neural network, see if it learned anything */
    printf("\nResults:\n");
    for(i=0; i<4; i++)
    {
        input = EncogDataGetInput(data,i);
        ideal = EncogDataGetIdeal(data,i);
        EncogNetworkCompute(net,input,output);
        *line = 0;
        EncogStrCatStr(line,"[",MAX_STR);
        EncogStrCatDouble(line,input[0],8,MAX_STR);
        EncogStrCatStr(line," ",MAX_STR);
        EncogStrCatDouble(line,input[1],8,MAX_STR);
        EncogStrCatStr(line,"] = ",MAX_STR);
        EncogStrCatDouble(line,output[0],8,MAX_STR);
		EncogStrCatStr(line," (ideal=", MAX_STR);
        EncogStrCatDouble(line,ideal[0],8,MAX_STR);
		EncogStrCatStr(line," )", MAX_STR);
        puts(line);
    }
 
/* Delete the neural network */
    EncogObjectFree(net);
	EncogErrorCheck();
 
	return 0;
}

HTML5/JavaScript Version

<!DOCTYPE html>
<html>
<head>
    <title>Encog XOR Hello World</title>
</head>
<body>
Hello World
<div id="out"> </div>
<script src="../encog.js"></script>
<script src="../encog-widget.js"></script>
<script type="text/javascript">
 
    var XOR_INPUT = [
        [0,0],
        [1,0],
        [0,1],
        [1,1]
    ];
 
    var XOR_IDEAL = [
        [0],
        [1],
        [1],
        [0]
    ];
 
    // create a console widget inside the 'out' div, and write a few test lines
    var con = ENCOG.GUI.Console.create('out');
    con.writeLine('One');
    con.writeLine('Two');
    con.writeLine('Three');
 
    // three layers: 2 input neurons (with bias), 3 hidden (with bias), 1 output (no bias)
    var network = ENCOG.BasicNetwork.create( [
        ENCOG.BasicLayer.create(ENCOG.ActivationSigmoid.create(),2,1),
        ENCOG.BasicLayer.create(ENCOG.ActivationSigmoid.create(),3,1),
        ENCOG.BasicLayer.create(ENCOG.ActivationSigmoid.create(),1,0)] );
    network.randomize();
 
    var train = ENCOG.PropagationTrainer.create(network,XOR_INPUT,XOR_IDEAL,"RPROP",0,0);
 
    var iteration = 1;
 
    do
    {
        train.iteration();
        var str = "Training Iteration #" + iteration + ", Error: " + train.error;
        con.writeLine(str);
        iteration++;
    } while( iteration<1000 && train.error>0.01);
 
    var output = new Array(1);
 
    con.writeLine("Testing neural network");
    for(var i=0;i<XOR_INPUT.length;i++)
    {
        network.compute(XOR_INPUT[i],output);
        var str = "Input: " + String(XOR_INPUT[i][0])
                + " ; " + String(XOR_INPUT[i][1])
                + "   Output: " + String(output[0])
                + "   Ideal: " + String(XOR_IDEAL[i][0]);
        con.writeLine(str);
    }
 
</script>
 
 
</body>
</html>