<?php define('__ROOT__', dirname(dirname(__FILE__)));

require_once __ROOT__.'/autoloader.php';

use \neuralnetwork\core\Genome;
use \neuralnetwork\core\NeuralNetwork;
use \filemanager\core\FileManager;

function behaviour($abc){
    return [$abc[0] & $abc[1], $abc[1] | $abc[2]];
}
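// Target behaviour to learn: bitwise AND of the first two inputs, bitwise OR of
// the last two, e.g. behaviour([1, 0, 1]) === [1 & 0, 0 | 1] === [0, 1].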


if( true && 'test_creating_dataset' ){

    $part = 1;

    echo "Welcome to neural-network.php\n";
    echo "-----------------------------\n\n";


    /* [1] Trying to load the neural network
    =========================================================*/
    try{

        $nn = NeuralNetwork::load('test/test1');
        echo "$part. NeuralNetwork loaded from 'test/test1'\n"; $part++;

    /* [2] Otherwise, create it
    =========================================================*/
    }catch(\Exception $e){

        $nn = NeuralNetwork::create(50, 1000);

        $nn->setHiddenLayersCount(4);
        $nn->setHiddenLayerNeuronsCount(4);
        $nn->setInputLayerCount(3);
        $nn->setOutputLayerCount(2);
        $nn->setMutationThreshold(0.5);
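
        // Topology: 3 inputs, 4 hidden layers of 4 neurons each, 2 outputs, matching
        // behaviour()'s 3-element input and 2-element output. The create(50, 1000)
        // arguments presumably correspond to the 50 generations and 1000 genomes
        // iterated in [4] below (assumption, not confirmed by this file).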

        echo "$part. NeuralNetwork configured\n"; $part++;

        $d = [0, 0, 0]; $nn->addSample($d, behaviour($d));
        $d = [0, 0, 1]; $nn->addSample($d, behaviour($d));
        $d = [0, 1, 0]; $nn->addSample($d, behaviour($d));
        $d = [0, 1, 1]; $nn->addSample($d, behaviour($d));
        $d = [1, 0, 0]; $nn->addSample($d, behaviour($d));
        $d = [1, 0, 1]; $nn->addSample($d, behaviour($d));
        $d = [1, 1, 0]; $nn->addSample($d, behaviour($d));
        $d = [1, 1, 1]; $nn->addSample($d, behaviour($d));

        $d = [0, 0, 0]; $nn->addSample($d, behaviour($d));
        $d = [0, 0, 2]; $nn->addSample($d, behaviour($d));
        $d = [0, 2, 0]; $nn->addSample($d, behaviour($d));
        $d = [0, 2, 2]; $nn->addSample($d, behaviour($d));
        $d = [2, 0, 0]; $nn->addSample($d, behaviour($d));
        $d = [2, 0, 2]; $nn->addSample($d, behaviour($d));
        $d = [2, 2, 0]; $nn->addSample($d, behaviour($d));
        $d = [2, 2, 2]; $nn->addSample($d, behaviour($d));

        echo "$part. Samples added to NeuralNetwork\n"; $part++;

        $nn->store('test/test1', true);
        echo "$part. NeuralNetwork stored to 'test/test1'\n"; $part++;
    }
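    // Once stored, subsequent runs take the load path in [1] instead of
    // rebuilding and re-sampling the network.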


    /* [3] Initializing learning routine
    =========================================================*/
    $fitness = 0;
    $max_fit = 0;

    $nn->loadLearningRoutine(function($input, $output){
        global $fitness;

        $diff = [ abs($output[0] - behaviour($input)[0]), abs($output[1] - behaviour($input)[1]) ];

        if( $diff[0] > 0 ) $fitness += 1 / $diff[0];
        if( $diff[1] > 0 ) $fitness += 1 / $diff[1];
    });
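
    // The routine rewards outputs close to the target (1 / |error| per output);
    // note that an exact match (error == 0) adds nothing under this scheme.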

    echo "$part. Learning routine initialized.\n"; $part++;


    /* [4] Learning through generations and genomes
    =========================================================*/
    /* (1) For each generation */
    for( $gnr = 0 ; $gnr < 50 ; $gnr++ ){

        $max_fit = 0;
        $start = microtime(true);

        /* (2) For each genome */
        for( $gnm = 0 ; $gnm < 1000 ; $gnm++ ){

            $fitness = 0;

            /* (2.1) Get current genome */
            $g = $nn->getGenome();
            echo "\r[x] genome ".($nn->gnm+1)."/1000 on generation ".($nn->gnr+1)."/50 - max fitness: $max_fit ";

            /* (2.2) Train genome with random samples */
            for( $r = 0 ; $r < 100 ; $r++ )
                $g->train([rand(0,10), rand(0,10), rand(0,10)]);
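            // Each train() call is expected to run the genome on the input and invoke
            // the learning routine registered in [3], which accumulates $fitness globally
            // (inferred from this script; the library internals are not shown here).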

            /* (2.3) Set fitness & go to next genome */
            if( $fitness > $max_fit ) $max_fit = $fitness;

            $g->setFitness($fitness);
            $nn->nextGenome();
        }

        echo "\n\t".(microtime(true)-$start)."s\n";
    }

}



if( false && 'load_neural_network' ){

    $nn = NeuralNetwork::load('test/test1');
}


if( false && 'test_genomes' ){

    /* (1) Basic Creation */
    $a = new Genome(2, 3, 3, 2); // 2 layers of 3 neurons each -> randomly filled

    echo "A : ".$a->serialize()."\n";

    /* (2) Inheritance */
    $b = new Genome($a); // Clone of @a
    echo "cloning A to B\n";
    echo "B : ".$b->serialize()."\n";

    /* (3) Mutation */
    $b->mutation(0.3); // @b has now mutated with a threshold of 30%
    echo "mutate B\n";
    echo "B : ".$b->serialize()."\n";

    /* (4) Cross-over (father+mother) */
    $c = new Genome($a, $b); // @c is a random mix of @a and @b
    echo "crossover : A+B -> C\n";
    echo "C : ".$c->serialize()."\n";

}


if( false ){

    $g = new Genome(2, 3, 3, 2);
    $fitness = 0;

    $g->setCallback(function($input, $output){
        global $fitness;

        echo "callback output: ".round($output[0]).", ".round($output[1])."\n";

        $result = behaviour($input);

        if( $output[0] == $result[0] )
            $fitness++;

        if( $output[1] == $result[1] )
            $fitness++;
    });
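
    // The callback appears to be invoked by train() with the genome's input and
    // output; here it counts exact matches against behaviour() as an integer fitness.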

    echo $g->train([0, 0, 0]);
    echo $g->train([0, 0, 1]);
    echo $g->train([0, 1, 0]);
    echo $g->train([0, 1, 1]);
    echo $g->train([1, 0, 0]);
    echo $g->train([1, 0, 1]);
    echo $g->train([1, 1, 0]);
    echo $g->train([1, 1, 1]);

    echo $g->train([0, 0, 0]);
    echo $g->train([0, 0, 2]);
    echo $g->train([0, 2, 0]);
    echo $g->train([0, 2, 2]);
    echo $g->train([2, 0, 0]);
    echo $g->train([2, 0, 2]);
    echo $g->train([2, 2, 0]);
    echo $g->train([2, 2, 2]);

    echo "fitness: $fitness\n";
    $g->setFitness($fitness);
    echo $g->serialize();

}


// REWRITE TEST
// for( $a = 0, $al = 50 ; $a < $al ; $a++ )
//     for( $b = 0, $bl = 20 ; $b < $bl ; $b++ ){
//         print "genome $b/$bl on generation $a/$al \r";
//         usleep(1000*10);
//     }

?>