This commit is contained in:
xdrm-brackets 2016-10-29 15:47:32 +02:00
parent 1c7f4e6566
commit 6826434e07
2 changed files with 93 additions and 90 deletions

View File

@ -17,6 +17,7 @@
private $numNeu; // Number of neurons for each hidden layer
private $inpNeu; // Number of input neurons
private $outNeu; // Number of output neurons
private $antReg; // Whether anti-regression option is enabled
private $storage; // path to storage
private $callback; // callback training function
@ -24,7 +25,8 @@
/************************************************
**** LOCAL ATTRIBUTES ****
************************************************/
private $maxFit; // Maximum fitness of the previous generation
public $maxFit; // Maximum fitness of the previous generation
public $minFit; // Minimum fitness of the current generation
public $gnr; // Current generation index
public $gnm; // Current genome index
private $genome; // Current genome instance
@ -121,6 +123,7 @@
$this->setHiddenLayerNeuronsCount($default['layer_neurons']); // default value
$this->max = null; // default value
$this->setStorage($default['storage']); // default value
$this->setAntiRegression(false); // default value
$this->callback = function(){}; // default value
}
@ -243,6 +246,19 @@
$this->outNeu = $outNeu;
}
/* SETS THE ANTI-REGRESSION OPTION
 *
 * @active<Boolean> Whether to enable or disable this option
 *
 */
public function setAntiRegression($active=null){
    /* (1) Reject anything that is not a strict boolean */
    if( is_bool($active) === false )
        throw new \Exception('Wrong argument for anti-regression enabling/disabling.');

    /* (2) Save the flag on the instance */
    $this->antReg = $active;
}
/************************************************
**** Sample Setters ****
@ -354,6 +370,7 @@
/* (1) Initializes data & storage */
$this->gnr = 0;
$this->gnm = 0;
$this->minFit = null;
$this->maxFit = null;
FileManager::write($this->storage['gn']['filename'], '');
FileManager::write($this->storage['ft']['filename'], '');
@ -382,6 +399,7 @@
/* (1) Initializes data & storage */
$this->gnr = 0;
$this->gnm = 0;
$this->minFit = null;
$this->maxFit = null;
FileManager::write($this->storage['gn']['filename'], '');
FileManager::write($this->storage['ft']['filename'], '');
@ -484,11 +502,10 @@
$ftRead = FileManager::read($this->storage['ft']['filename']);
$fitnesses = explode("\n", trim($ftRead) );
/* (3) Checks if fitnessEnd is reached */
$fitnessEnd = max($fitnesses) == $this->fitEnd;
/* (3.1) Stop process and store learned data */
if( $fitnessEnd ){
/* (3) Checks if fitnessEnd is reached */
if( min($fitnesses) == $this->fitEnd ){
/* (1) Get the 2 best genomes */
$best = FileManager::readline($this->storage['gn']['filename'], 0);
$best.= FileManager::readline($this->storage['gn']['filename'], 1);
@ -504,9 +521,8 @@
}
/* (4) Checks if there's a fitness evolution */
$fitnessEvolution = is_null($this->maxFit) || max($fitnesses) > $this->maxFit;
/* (4) Checks if there's a fitness maximum evolution */
$fitnessEvolution = !$this->antReg || is_null($this->maxFit) && is_null($this->minFit) || max($fitnesses) > $this->maxFit;
/* (4.1) If evolution -> choose best + cross-over ... */
if( $fitnessEvolution ){
@ -526,6 +542,9 @@
$mother->unserialize($sMother);
$this->maxFit = max($fitnesses);
$this->minFit = min($fitnesses);
$this->storeLearntBest();
/* (4.2) If regression -> renew generation */
}else{
@ -572,12 +591,7 @@
---------------------------------------------------------*/
}else{
/* (1) Get the 2 best genomes */
$best = FileManager::readline($this->storage['gn']['filename'], 0);
$best.= FileManager::readline($this->storage['gn']['filename'], 1);
/* (2) Stores data to learnt data */
FileManager::write($this->storage['ln']['filename'], $best);
$this->storeLearntBest();
/* (3) Destroy cursors */
$this->gnr = null;
@ -586,6 +600,17 @@
}
}
/* STORES THE 2 BEST GENOMES OF THE GENERATION
 *
 */
private function storeLearntBest(){
    /* (1) Concatenate the first two genome lines (the generation's best) */
    $learnt = '';
    foreach( [0, 1] as $rank )
        $learnt .= FileManager::readline($this->storage['gn']['filename'], $rank);

    /* (2) Persist them as the learnt data */
    FileManager::write($this->storage['ln']['filename'], $learnt);
}
// TODO: Manage @mutThr decreasing to be more precise
/************************************************
@ -650,6 +675,7 @@
$json['numNeu'] = $this->numNeu;
$json['inpNeu'] = $this->inpNeu;
$json['outNeu'] = $this->outNeu;
$json['antReg'] = $this->antReg;
$json['storage'] = $this->storage;
@ -679,6 +705,7 @@
$this->numNeu = $json['numNeu'];
$this->inpNeu = $json['inpNeu'];
$this->outNeu = $json['outNeu'];
$this->antReg = $json['antReg'];
$this->storage = $json['storage'];
}

View File

@ -6,14 +6,13 @@
use \neuralnetwork\core\NeuralNetwork;
use \filemanager\core\FileManager;
/* Reference behaviour: maps 3 inputs to a single output, in0 + in1 - in2. */
function behaviour($abc){
    $partial = $abc[0] + $abc[1];
    return [$partial - $abc[2]];
}
/* Test behaviour #1: linear combination in0 + in1 - in2 of the 3 inputs. */
function behaviourtest1($in){
    [$a, $b, $c] = $in;
    return [$a + $b - $c];
}
/* Test behaviour #2: quadratic form 2*in0^2 - 5*in1 + 8*in2 of the 3 inputs. */
function behaviourtest2($in){
    [$a, $b, $c] = $in;
    return [2 * $a ** 2 - 5 * $b + 8 * $c];
}
$train = false;
$train = true;
$guess = !$train;
if( $train && 'learning_process' ){
@ -28,55 +27,56 @@
=========================================================*/
try{
$nn = NeuralNetwork::load('test1/test1');
echo "$part. NeuralNetwork loaded from 'test1/test1'\n"; $part++;
$nn = NeuralNetwork::load('test2/test2');
echo "$part. NeuralNetwork loaded from 'test2/test2'\n"; $part++;
/* [2] Else, creates it
=========================================================*/
}catch(\Exception $e){
$nn = NeuralNetwork::create(50, 100);
$nn = NeuralNetwork::create(50, 500);
$nn->setHiddenLayersCount(3); // 3 Hidden layers
$nn->setHiddenLayerNeuronsCount(3); // Composed with 3 neurons each
$nn->setHiddenLayerNeuronsCount(4); // Composed with 4 neurons each
$nn->setInputLayerCount(3); // 3 inputs
$nn->setOutputLayerCount(1); // 1 output
$nn->setMutationThreshold(0.3); // mutation 30% each generation
$nn->setFitnessEnd(0); // Algorithm is done when fitness reaches 0
$nn->setFitnessEnd(-1.5); // Algorithm is done when fitness reaches -1.5
$nn->setAntiRegression(true); // That repeats a generation while its fitness is lower than the previous one
echo "$part. NeuralNetwork configured\n"; $part++;
$d = [0, 0, 0]; $nn->addSample($d, behaviour($d));
$d = [0, 0, 1]; $nn->addSample($d, behaviour($d));
$d = [0, 1, 0]; $nn->addSample($d, behaviour($d));
$d = [0, 1, 1]; $nn->addSample($d, behaviour($d));
$d = [1, 0, 0]; $nn->addSample($d, behaviour($d));
$d = [1, 0, 1]; $nn->addSample($d, behaviour($d));
$d = [1, 1, 0]; $nn->addSample($d, behaviour($d));
$d = [1, 1, 1]; $nn->addSample($d, behaviour($d));
$d = [0, 0, 0]; $nn->addSample($d, behaviour($d));
$d = [0, 0, 2]; $nn->addSample($d, behaviour($d));
$d = [0, 2, 0]; $nn->addSample($d, behaviour($d));
$d = [0, 2, 2]; $nn->addSample($d, behaviour($d));
$d = [2, 0, 0]; $nn->addSample($d, behaviour($d));
$d = [2, 0, 2]; $nn->addSample($d, behaviour($d));
$d = [2, 2, 0]; $nn->addSample($d, behaviour($d));
$d = [2, 2, 2]; $nn->addSample($d, behaviour($d));
$d = [0, 0, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 0, 1]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 1, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 1, 1]; $nn->addSample($d, behaviourtest2($d));
$d = [1, 0, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [1, 0, 1]; $nn->addSample($d, behaviourtest2($d));
$d = [1, 1, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [1, 1, 1]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 0, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 0, 2]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 2, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [0, 2, 2]; $nn->addSample($d, behaviourtest2($d));
$d = [2, 0, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [2, 0, 2]; $nn->addSample($d, behaviourtest2($d));
$d = [2, 2, 0]; $nn->addSample($d, behaviourtest2($d));
$d = [2, 2, 2]; $nn->addSample($d, behaviourtest2($d));
echo "$part. Samples added to NeuralNetwork\n"; $part++;
$nn->store('test1/test1', true);
echo "$part. NeuralNetwork stored to 'test1/test1'\n"; $part++;
$nn->store('test2/test2', true);
echo "$part. NeuralNetwork stored to 'test2/test2'\n"; $part++;
}
/* [2] Initializing learning routine
=========================================================*/
$defaultMT = 0.3;
$fitness = 0;
$max_fit = 0;
$nn->loadLearningRoutine(function($input, $output){
global $fitness;
$fitness -= abs(round($output[0]) - behaviour($input)[0]);
$fitness -= abs($output[0] - behaviourtest2($input)[0]);
});
echo "$part. Learning routine initialized.\n"; $part++;
@ -85,6 +85,7 @@
=========================================================*/
/* (1) For each generation */
$last_gnr = -1;
$gen_repeat = 0;
while( true ){
if( $nn->gnr > $last_gnr)
@ -93,6 +94,7 @@
$last_gnr = $nn->gnr;
$max_fit = -1e9;
$min_fit = 100;
/* (2) For each genome */
while( true ){
@ -100,18 +102,19 @@
/* (2.1) Get current genome */
$g = $nn->getGenome();
echo "\r[x] gnm ".($nn->gnm+1)."/100 on gnr ".($nn->gnr+1)."/50 - max_fit: $max_fit ";
echo "\r[x] gnm ".($nn->gnm+1)."/500 on gnr ".($nn->gnr+1)."/50 - x".($gen_repeat+1)." - fit[$min_fit;$max_fit] ";
/* (2.2) Train genome with random samples */
for( $r = 0 ; $r < 100 ; $r++ )
$g->train([rand(0,100), rand(0,100), rand(0,100)]);
$g->train([rand(0,10), rand(0,10), rand(0,10)]);
/* (2.3) Set fitness & go to next genome */
if( $fitness > $max_fit ) $max_fit = $fitness;
if( $fitness < $min_fit ) $min_fit = $fitness;
$g->setFitness($fitness);
if( $nn->gnm >= 100-1 )
if( $nn->gnm >= 500-1 )
break;
$nn->nextGenome();
@ -119,9 +122,14 @@
$nn->nextGenome();
// If generation evolution, notify
if( $nn->gnr > $last_gnr)
if( $nn->gnr > $last_gnr){
echo "\n\t".((microtime(true)-$start))."s\n";
$gen_repeat = 0;
}else $gen_repeat++;
if( is_null($nn->gnr) || $nn->gnr == 50-1 )
break;
@ -132,6 +140,12 @@
}
if( $guess && 'guessing_process' ){
$part = 1;
@ -143,8 +157,8 @@
=========================================================*/
try{
$nn = NeuralNetwork::load('test1/test1');
echo "$part. NeuralNetwork loaded from 'test1/test1'\n"; $part++;
$nn = NeuralNetwork::load('test2/test2');
echo "$part. NeuralNetwork loaded from 'test2/test2'\n"; $part++;
/* [2] Else, creates it
=========================================================*/
@ -160,55 +174,17 @@
$genome = $nn->getTrainedGenome();
$genome->setCallback(function($in, $out){
echo "callback input: ".implode(',', $in)."\n";
echo "callback output: ".round($out[0])."\n";
echo "callback result: ".implode(',', behaviour($in))."\n";
echo "callback output: ".$out[0]."\n";
echo "callback result: ".implode(',', behaviourtest2($in))."\n";
});
$genome->train([rand(0,100), rand(0,100), rand(0,100)]);
$genome->train([rand(0,10), rand(0,10), rand(0,10)]);
}
if( false ){
$g = new Genome(2, 3, 3, 2);
$fitness = 0;
$g->setCallback(function($input, $output){
global $fitness;
echo "callback output: ".round($output[0]).", ".round($output[1])."\n";
$result = behaviour($input);
if( $output[0] == $result[0] )
$fitness++;
if( $output[1] == $result[1] )
$fitness++;
});
echo $g->train([0, 0, 0]);
echo $g->train([0, 0, 1]);
echo $g->train([0, 1, 0]);
echo $g->train([0, 1, 1]);
echo $g->train([1, 0, 0]);
echo $g->train([1, 0, 1]);
echo $g->train([1, 1, 0]);
echo $g->train([1, 1, 1]);
echo $g->train([0, 0, 0]);
echo $g->train([0, 0, 2]);
echo $g->train([0, 2, 0]);
echo $g->train([0, 2, 2]);
echo $g->train([2, 0, 0]);
echo $g->train([2, 0, 2]);
echo $g->train([2, 2, 0]);
echo $g->train([2, 2, 2]);
echo "fitness: $fitness\n";
$g->setFitness($fitness);
echo $g->serialize();
}
// REWRITE TEST
// for( $a = 0, $al = 50 ; $a < $al ; $a++ )