实战教程:利用Java Encog类的典型应用示例
最编程
2024-02-29 19:10:10
...
实例1: setup
import org.encog.Encog; // import the required package/class
/**
 * Setup to train the SVM.
 *
 * Initializes the grid search at the configured starting gamma/const values,
 * resets the best error, and validates every search parameter before training
 * begins.
 *
 * @throws EncogError if a starting value is non-positive (or below the
 *         double-equality tolerance), or if a step size is negative.
 */
private void setup() {
    this.currentConst = this.constBegin;
    this.currentGamma = this.gammaBegin;
    this.bestError = Double.POSITIVE_INFINITY;
    this.isSetup = true;

    // Starting values must be strictly positive and above the equality
    // tolerance, otherwise the search degenerates.
    if (this.currentGamma <= 0 || this.currentGamma < Encog.DEFAULT_DOUBLE_EQUAL) {
        throw new EncogError("SVM search training cannot use a gamma value less than zero.");
    }
    if (this.currentConst <= 0 || this.currentConst < Encog.DEFAULT_DOUBLE_EQUAL) {
        throw new EncogError("SVM search training cannot use a const value less than zero.");
    }
    // FIX: the two step-size messages below were copy-paste garbled in the
    // original ("gamma step cannot use a const value") and misreported the
    // offending parameter; the conditions are unchanged.
    if (this.gammaStep < 0) {
        throw new EncogError("SVM search training cannot use a gamma step less than zero.");
    }
    if (this.constStep < 0) {
        throw new EncogError("SVM search training cannot use a const step less than zero.");
    }
}
实例2: main
import org.encog.Encog; // import the required package/class
/**
 * Entry point: evolves a NEAT network that learns XOR, then evaluates it.
 *
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    MLDataSet xorData = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // Initial population: 2 inputs, 1 output, 1000 genomes.
    NEATPopulation population = new NEATPopulation(2, 1, 1000);
    population.setInitialConnectionDensity(1.0); // not required, but speeds training
    population.reset();

    // Fitness is measured as error against the XOR training set.
    CalculateScore fitness = new TrainingSetScore(xorData);
    final EvolutionaryAlgorithm trainer = NEATUtil.constructNEATTrainer(population, fitness);

    // Evolve until the error drops to 1% or below; at least one
    // generation always runs (matches the original do/while).
    while (true) {
        trainer.iteration();
        System.out.println("Epoch #" + trainer.getIteration() + " Error:" + trainer.getError() + ", Species:" + population.getSpecies().size());
        if (trainer.getError() <= 0.01) {
            break;
        }
    }

    // Decode the fittest genome into a runnable network and evaluate it.
    NEATNetwork champion = (NEATNetwork) trainer.getCODEC().decode(trainer.getBestGenome());
    System.out.println("Neural Network Results:");
    EncogUtility.evaluate(champion, xorData);
    Encog.getInstance().shutdown();
}
实例3: activationFunction
import org.encog.Encog; // import the required package/class
/**
 * {@inheritDoc}
 *
 * Computes the softmax over x[start .. start+size-1] in place. The maximum
 * element is subtracted before exponentiation; softmax is shift-invariant,
 * so the mathematical result is unchanged, but this prevents exp() from
 * saturating on large inputs (or the sum collapsing to ~0 on strongly
 * negative inputs), which previously forced the uniform fallback.
 */
@Override
public final void activationFunction(final double[] x, final int start,
        final int size) {
    // Nothing to normalize for an empty (or negative) range.
    if (size <= 0) {
        return;
    }

    // Find the largest value in the range for the stability shift.
    double max = x[start];
    for (int i = start + 1; i < start + size; i++) {
        if (x[i] > max) {
            max = x[i];
        }
    }

    // Exponentiate the shifted values; the max element maps to exp(0)=1,
    // so the sum is at least 1 for any finite input.
    double sum = 0;
    for (int i = start; i < start + size; i++) {
        x[i] = BoundMath.exp(x[i] - max);
        sum += x[i];
    }

    if (Double.isNaN(sum) || sum < Encog.DEFAULT_DOUBLE_EQUAL) {
        // Degenerate sum (NaN input): fall back to a uniform distribution.
        for (int i = start; i < start + size; i++) {
            x[i] = 1.0 / size;
        }
    } else {
        // Normalize so the range sums to 1.
        for (int i = start; i < start + size; i++) {
            x[i] = x[i] / sum;
        }
    }
}
实例4: main
import org.encog.Encog; // import the required package/class
/**
 * The main method.
 *
 * Builds a 2-3-1 feed-forward network by hand (no factory), trains it on
 * XOR with resilient propagation, then prints the trained outputs.
 *
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    // Input layer with bias, one sigmoid hidden layer, sigmoid output.
    final BasicNetwork net = new BasicNetwork();
    net.addLayer(new BasicLayer(null, true, 2));
    net.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    net.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    net.getStructure().finalizeStructure();
    net.reset();

    // XOR truth table as the training data.
    final MLDataSet xorSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // Train until the error is at most 1%; at least one iteration
    // always runs (matches the original do/while).
    final ResilientPropagation rprop = new ResilientPropagation(net, xorSet);
    int generation = 0;
    while (true) {
        rprop.iteration();
        generation++;
        System.out.println("Epoch #" + generation + " Error:" + rprop.getError());
        if (rprop.getError() <= 0.01) {
            break;
        }
    }
    rprop.finishTraining();

    // Show the trained network's output for every training pair.
    System.out.println("Neural Network Results:");
    for (final MLDataPair sample : xorSet) {
        final MLData result = net.compute(sample.getInput());
        System.out.println(sample.getInput().getData(0) + "," + sample.getInput().getData(1)
                + ", actual=" + result.getData(0) + ",ideal=" + sample.getIdeal().getData(0));
    }
    Encog.getInstance().shutdown();
}