Skip to main content

fun with encog 2.1..sine wave predictor

Hi,
Out of curiosity, I've been playing around with Encog.

http://www.heatonresearch.com/encog
http://code.google.com/p/encog-java/downloads/list

Here is a program which I borrowed from encog's website..

Here is the output from my program..

-0->-0.17364 -1->0.0 -2->0.17364 -3->0.34202 -4->0.5 =0=>0.64278 >>0.6662971234778998 
-0->0.0 -1->0.17364 -2->0.34202 -3->0.5 -4->0.64278 =0=>0.76604 >>0.8159551242229075 
-0->0.17364 -1->0.34202 -2->0.5 -3->0.64278 -4->0.76604 =0=>0.86602 >>0.8951268774699124 
-0->0.34202 -1->0.5 -2->0.64278 -3->0.76604 -4->0.86602 =0=>0.93969 >>0.9265601010306325 
-0->0.5 -1->0.64278 -2->0.76604 -3->0.86602 -4->0.93969 =0=>0.9848 >>0.9349698519994352 
-0->0.64278 -1->0.76604 -2->0.86602 -3->0.93969 -4->0.9848 =0=>1.0 >>0.9310725862692986 
-0->0.76604 -1->0.86602 -2->0.93969 -3->0.9848 -4->1.0 =0=>0.9848 >>0.9171842607193943 
-0->0.86602 -1->0.93969 -2->0.9848 -3->1.0 -4->0.9848 =0=>0.93969 >>0.8907604772439968 


package com.linuxclicks.neural.examples.encog.tests.sine;

import java.util.List;

import org.encog.neural.activation.ActivationTANH;
import org.encog.neural.data.NeuralData;
import org.encog.neural.data.NeuralDataPair;
import org.encog.neural.data.temporal.TemporalDataDescription;
import org.encog.neural.data.temporal.TemporalNeuralDataSet;
import org.encog.neural.data.temporal.TemporalPoint;
import org.encog.neural.data.temporal.TemporalDataDescription.Type;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.anneal.NeuralSimulatedAnnealing;
import org.encog.neural.networks.training.anneal.NeuralTrainingSetSimulatedAnnealing;
import org.encog.neural.networks.training.propagation.back.Backpropagation;


/**
 * Sine-wave prediction example built on Encog 2.1.  Generates a sampled
 * sine curve, trains a 5-7-1 TANH feed-forward network on the first half
 * of the samples (via simulated annealing), then prints the network's
 * predictions against the held-out second half.
 */
public class Predictor
{

    /** Total number of sine samples generated (training + validation). */
    public final static int ACTUAL_SIZE = 500;
    /** Samples used for training; the remainder is used for validation. */
    public final static int TRAINING_SIZE = 250;
    /** Window of past samples fed to the network as input. */
    public final static int INPUT_SIZE = 5;
    /** Number of values predicted per input window. */
    public final static int OUTPUT_SIZE = 1;

    /** Neuron count of the single hidden layer. */
    public final static int NEURONS_HIDDEN_1 = 7;
    /** Retained for compatibility; backprop training is disabled in main(). */
    public final static boolean USE_BACKPROP = true;

    private BasicNetwork network;

    //
    // actual data used for the training and validation
    //
    private Data data;
    private TemporalNeuralDataSet temporalDataSet;
    private TemporalNeuralDataSet validationDataSet;

    public static void main(String[] args)
    {
        Predictor predictor = new Predictor();
        predictor.generateData();
        predictor.createNetwork();
        predictor.generateTrainingSets();
        predictor.generateValidationTrainingSets();
        // predictor.trainNetworkBackprop();
        predictor.trainNetworkAnneal();
        predictor.display();
    }

    /** Builds the sine-wave sample data shared by training and validation. */
    private void generateData()
    {
        this.data = new Data(Predictor.ACTUAL_SIZE, Predictor.INPUT_SIZE, Predictor.OUTPUT_SIZE);
    }

    /** Creates the 5-7-1 feed-forward network with TANH activations and bias. */
    public void createNetwork()
    {
        this.network = new BasicNetwork();

        this.network.addLayer(new BasicLayer(new ActivationTANH(), true, Predictor.INPUT_SIZE));
        this.network.addLayer(new BasicLayer(new ActivationTANH(), true, Predictor.NEURONS_HIDDEN_1));
        this.network.addLayer(new BasicLayer(new ActivationTANH(), true, Predictor.OUTPUT_SIZE));

        this.network.getStructure().finalizeStructure();
    }

    /**
     * Builds the temporal training set from the first TRAINING_SIZE samples.
     * Each point carries the sample twice: description 0 is the input
     * series, description 1 the ideal (predicted) series; generate()
     * windows them into INPUT_SIZE inputs and OUTPUT_SIZE ideals.
     */
    private void generateTrainingSets()
    {
        this.temporalDataSet = new TemporalNeuralDataSet(Predictor.INPUT_SIZE, Predictor.OUTPUT_SIZE);

        // series 0: network input; series 1: ideal output
        this.temporalDataSet.addDescription(new TemporalDataDescription(Type.RAW, true, false));
        this.temporalDataSet.addDescription(new TemporalDataDescription(Type.RAW, false, true));

        for (int i = 0; i < Predictor.TRAINING_SIZE; i++)
        {
            TemporalPoint tp = this.temporalDataSet.createPoint(i);
            tp.setData(0, this.data.actual[i]);
            tp.setData(1, this.data.actual[i]);
        }

        this.temporalDataSet.generate();
    }

    /**
     * Builds the validation set from the remaining samples
     * (TRAINING_SIZE .. ACTUAL_SIZE-1), windowed the same way as the
     * training set.
     */
    private void generateValidationTrainingSets()
    {
        this.validationDataSet = new TemporalNeuralDataSet(Predictor.INPUT_SIZE, Predictor.OUTPUT_SIZE);

        // series 0: network input; series 1: ideal output
        this.validationDataSet.addDescription(new TemporalDataDescription(Type.RAW, true, false));
        this.validationDataSet.addDescription(new TemporalDataDescription(Type.RAW, false, true));

        for (int i = Predictor.TRAINING_SIZE; i < Predictor.ACTUAL_SIZE; i++)
        {
            TemporalPoint tp = this.validationDataSet.createPoint(i);
            tp.setData(0, this.data.actual[i]);
            tp.setData(1, this.data.actual[i]);
        }

        this.validationDataSet.generate();
    }

    /**
     * Trains with backpropagation (learning rate 0.001, momentum 0.1)
     * until the error drops below 3% or 5000 epochs elapse.
     * Currently unused — see the commented call in main().
     */
    private void trainNetworkBackprop()
    {
        final Backpropagation train = new Backpropagation(this.network, this.temporalDataSet, 0.001, 0.1);

        int epoch = 1;

        do
        {
            train.iteration();
            System.out.println("Iteration #" + epoch + " Error:" + train.getError());
            epoch++;
        }
        while ((epoch < 5000) && (train.getError() > 0.03));
    }

    /**
     * Trains with simulated annealing (temperature 10 down to 2, 100
     * cycles per iteration) until the error drops below 4%.
     * NOTE(review): unlike trainNetworkBackprop() there is no epoch cap,
     * so this loops forever if the error never reaches 0.04 — consider
     * adding a limit.
     */
    private void trainNetworkAnneal()
    {
        NeuralSimulatedAnnealing train = new NeuralTrainingSetSimulatedAnnealing(this.network, this.temporalDataSet,
                10, 2, 100);

        int epoch = 1;

        do
        {
            train.iteration();
            System.out.println("Iteration #" + epoch + " Error:" + train.getError());
            epoch++;
        }
        while (train.getError() > 0.04);
    }

    /**
     * Prints, for every validation pair: the input window (-i->), the
     * ideal next value (=i=>), and the network's prediction (>>).
     */
    private void display()
    {
        // typed instead of the original raw List to avoid unchecked iteration
        List<NeuralDataPair> dataPairs = this.validationDataSet.getData();
        for (NeuralDataPair dp : dataPairs)
        {
            NeuralData inputData = dp.getInput();
            for (int x = 0; x < inputData.size(); x++)
            {
                System.out.print(" -" + x + "->" + inputData.getData(x));
            }
            NeuralData idealData = dp.getIdeal();
            for (int x = 0; x < idealData.size(); x++)
            {
                System.out.print(" =" + x + "=>" + idealData.getData(x));
            }

            NeuralData predictedNd = this.network.compute(inputData);

            for (int y = 0; y < predictedNd.size(); y++)
            {
                System.out.print(" >>" + predictedNd.getData(y));
            }

            System.out.println(" ");
        }
    }
}



package com.linuxclicks.neural.examples.encog.tests.sine;

/**
 * Holds the complete sequence of sine-wave samples shared by the
 * training and validation phases, and exposes the underlying curve
 * function so individual points can be recomputed on demand.
 *
 * @author Terry
 */
public class Data
{
    // every generated sample, in order (training samples first, then
    // the validation samples)
    public final double actual[];

    /**
     * Evaluates the sine curve at the given angle.
     *
     * @param deg angle in degrees
     * @return sin(deg), truncated (not rounded) to five decimal places
     */
    public static double calculateSine(final double deg)
    {
        final double radians = deg * (Math.PI / 180);
        final double sine = Math.sin(radians);
        // truncate toward zero at the fifth decimal place
        return ((int) (sine * 100000.0)) / 100000.0;
    }

    /**
     * Generates the full sample array: one sine value every 10 degrees,
     * starting from 0 degrees.
     *
     * @param size       number of samples to generate
     * @param inputSize  unused — kept so existing callers keep compiling
     * @param outputSize unused — kept so existing callers keep compiling
     */
    public Data(final int size, final int inputSize, final int outputSize)
    {
        this.actual = new double[size];

        // walk the curve in 10-degree steps, storing each sample
        int degrees = 0;
        for (int index = 0; index < this.actual.length; index++)
        {
            this.actual[index] = Data.calculateSine(degrees);
            degrees += 10;
        }
    }

}

Comments

Popular posts from this blog

clonezilla - creating a wifi ad-hoc hotspot and running a ssh server

Here are some notes on connecting to a wifi-hotspot from a running clonezilla
live-cd.

[ target machine to save a backup hard-disk clone/image ]
[ target machine is also running a wifi hotspot and an ssh server ]
Ubuntu 11.04 - Natty Narwhal

terry@terry-HP:~$ uname -a
Linux terry-HP 2.6.38-10-generic #46-Ubuntu SMP Tue Jun 28 15:07:17 UTC 2011 x86_64 x86_64 x86_64 GNU/Linux
terry@terry-HP:~$

[ Creating an 'ad-hoc' wifi spot ]
Click the 'up down arrow' icon ( or what have you )


[ choose menu item to create a wifi ad-hoc hotspot ]

[ create the ad-hoc wifi hotspot ]

[ the eth1 connection is the created ad-hoc network with an essid of 'terry' ]

[ the ad-hoc wifi hotspot is now visible on the 'host' computer and other computer as well now ]

[ enter the following on the machine being cloned with clonezilla
[ at the appropriate place in the clonezilla backup image step ]
terry@terry-HP:~$ ip link set wlan0 down
terry@terry-HP:~$ iwconfig wlan0 mode ad-hoc
terry@…

Translators, maps, conduits, and containers

Interpreters
The Amiga had a hardware-based solution that handed instructions off to a dedicated x86 board, so the Amiga could run Microsoft DOS spreadsheet programs alongside the Amiga OS.

MS-DOS on Amiga via Sidecar or Bridgeboard[edit]
MS-DOS compatibility was a major issue during the early years of the Amiga's lifespan in order to promote the machine as a serious business machine. In order to run the MS-DOS operating system, Commodore released the Sidecar for the Amiga 1000, basically a 8088 board in a closed case that connected to the side of the Amiga. Clever programming (a library named Janus, after the two-faced Roman god of doorways) made it possible to run PC software in an Amiga window without use of emulation. At the introduction of the Sidecar the crowd was stunned to see the MS-DOS version of Microsoft Flight Simulator running at full speed in an Amiga window on the Workbench.

Later the Sidecar was implemented on an expansion card named "Bridgebo…

Use Gwenview to upload images to picasaweb.google.com

Use Gwenview to upload images to https://picasaweb.google.com/

install the following KDE4 plugins:

terry@narwhal:~/download$ sudo apt-get install kipi-plugins
From Linux Clicks...
the Gwenview application...

From Linux Clicks...