package nn

import (
	"testing"

	"gonum.org/v1/gonum/floats"
)
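
// TestEmptyNetworkErrors checks that Empty rejects invalid layer layouts:
// fewer than two layers, more than 255 layers, or an empty layer.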
func TestEmptyNetworkErrors(t *testing.T) {
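	// F255 and F256 sit on either side of the 255-layer limit.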
	F255 := make([]uint, 255)
	for i := range F255 {
		F255[i] = 1
	}
	F256 := make([]uint, 256)
	for i := range F256 {
		F256[i] = 1
	}

	tests := []struct {
		Args []uint
		Err  error
	}{
		{[]uint{}, ErrMissingLayers},
		{[]uint{1}, ErrMissingLayers},
		{[]uint{1, 2}, nil},
		{F255, nil},
		{F256, ErrTooMuchLayers},
		{[]uint{1, 1, 0}, ErrEmptyLayer},
	}

	for _, test := range tests {
		_, err := Empty(test.Args...)

		// unexpected error
		if test.Err == nil && err != nil {
			t.Errorf("Unexpected error <%s>", err)
			continue
		}

		// expected error
		if test.Err != nil && err != test.Err {
			t.Errorf("Expected error <%s> but got <%s>", test.Err, err)
			continue
		}
	}
}
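
// TestEmptyNetworkSizes checks that Empty allocates the expected number of
// layers and the expected dimensions for neurons, biases, and weights.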
func TestEmptyNetworkSizes(t *testing.T) {
	tests := [][]uint{
		{1, 2, 3},
		{4, 6, 9, 10},
		{1, 1, 1},
	}

	for _, test := range tests {
		net, err := Empty(test...)

		// unexpected error
		if err != nil {
			t.Errorf("Unexpected error <%s>", err)
			continue
		}

		// 1. Check the number of neuron layers
		if len(net.Neurons) != len(test) {
			t.Errorf("Expected %d layers of neurons, got %d", len(test), len(net.Neurons))
			continue
		}

		// 2. Check the number of bias layers (layers-1, the input layer has none)
		if len(net.Biases) != len(test)-1 {
			t.Errorf("Expected %d layers of biases, got %d", len(test)-1, len(net.Biases))
			continue
		}

		// 3. Check the number of weight matrices (layers-1, one per layer transition)
		if len(net.Weights) != len(test)-1 {
			t.Errorf("Expected %d layers of weights, got %d", len(test)-1, len(net.Weights))
			continue
		}

		// 4. Check the neuron count in each layer
		for n, neuron := range net.Neurons {
			if uint(neuron.ColView(0).Len()) != test[n] {
				t.Errorf("Expected %d neurons on layer %d, got %d", test[n], n, neuron.ColView(0).Len())
			}
		}

		// 5. Check the bias count in each layer
		for b, bias := range net.Biases {
			if uint(bias.ColView(0).Len()) != test[b+1] {
				t.Errorf("Expected %d biases on layer %d, got %d", test[b+1], b+1, bias.ColView(0).Len())
			}
		}

		// 6. Check each weight matrix's dimensions:
		// rows follow the destination layer, cols the source layer
		for w, weight := range net.Weights {
			rows, cols := weight.Dims()

			if uint(rows) != test[w+1] { // invalid rows
				t.Errorf("Expected %d weight rows from layer %d to %d, got %d", test[w+1], w, w+1, rows)
			}
			if uint(cols) != test[w] { // invalid cols
				t.Errorf("Expected %d weight cols from layer %d to %d, got %d", test[w], w, w+1, cols)
			}
		}
	}
}
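
// TestForwardPass recomputes each neuron by hand and checks the network's
// forward pass against the result.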
func TestForwardPass(t *testing.T) {
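	// each case pairs a layer layout with an input sized to the first layer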
	tests := []struct {
		Layers []uint
		X      []float64
	}{
		{
			[]uint{2, 2, 1},
			[]float64{0.5, 0.2},
		}, {
			[]uint{4, 5, 10},
			[]float64{0.5, 0.2, 0.8, 0.4},
		},
	}

	for _, test := range tests {
		net, err := Empty(test.Layers...)
		if err != nil {
			t.Errorf("Unexpected error <%s>", err)
			break
		}

		// apply forward pass
		_, err = net.Guess(test.X...)
		if err != nil {
			t.Errorf("Unexpected error <%s>", err)
			break
		}

		// check layer by layer (start at 1; layer 0 holds the input)
		for l, ll := 1, len(net.Neurons); l < ll; l++ {

			// each neuron = sigmoid( sum(previous neuron * its weight) + bias )
			for n, nl := 0, net.Neurons[l].ColView(0).Len(); n < nl; n++ {
				sum := net.Biases[l-1].At(n, 0)

				// add each previous neuron times its weight
				for i, il := 0, net.Neurons[l-1].ColView(0).Len(); i < il; i++ {
					sum += net.Neurons[l-1].At(i, 0) * net.Weights[l-1].At(n, i)
				}
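
				// apply the activation; sigmoid presumably takes (i, j, v) so it
				// can double as a mat Apply callback, hence the two zero arguments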
				sum = sigmoid(0, 0, sum)

				// check the recomputed sum against the network's neuron
				// (1e-9, not 1e9: a 1e9 tolerance would accept anything)
				if !floats.EqualWithinAbs(net.Neurons[l].At(n, 0), sum, 1e-9) {
					t.Fatalf("Expected neuron %d.%d to be %f, got %f", l, n, sum, net.Neurons[l].At(n, 0))
				}
			}
		}
	}
}