// neuralnet/network_test.go
package nn
import (
"gonum.org/v1/gonum/floats"
"testing"
)
// TestEmptyNetworkErrors checks that Empty validates its layer-size
// arguments: too few layers, too many layers (>255) and empty layers
// must produce the corresponding sentinel errors.
func TestEmptyNetworkErrors(t *testing.T) {
	// F255 is the largest accepted layer count; F256 is one past the limit.
	F255 := make([]uint, 255)
	for i := range F255 {
		F255[i] = 1
	}
	F256 := make([]uint, 256)
	for i := range F256 {
		F256[i] = 1
	}
	tests := []struct {
		Args []uint
		Err  error
	}{
		{[]uint{}, ErrMissingLayers},
		{[]uint{1}, ErrMissingLayers},
		{[]uint{1, 2}, nil},
		{F255, nil},
		{F256, ErrTooMuchLayers},
		{[]uint{1, 1, 0}, ErrEmptyLayer},
	}
	for _, test := range tests {
		_, err := Empty(test.Args...)
		// unexpected error
		if test.Err == nil && err != nil {
			t.Errorf("Unexpected error <%s>", err)
			continue
		}
		// expected error not returned (or wrong error)
		if test.Err != nil && err != test.Err {
			t.Errorf("Expected error <%s> but got <%s>", test.Err, err)
			continue
		}
	}
}
// TestEmptyNetworkSizes verifies the dimensions of a network built by
// Empty: one neuron vector per layer, one bias vector and one weight
// matrix per layer transition, each sized to match its layers.
func TestEmptyNetworkSizes(t *testing.T) {
	cases := [][]uint{
		{1, 2, 3},
		{4, 6, 9, 10},
		{1, 1, 1},
	}
	for _, sizes := range cases {
		net, err := Empty(sizes...)
		// unexpected error
		if err != nil {
			t.Errorf("Unexpected error <%s>", err)
			continue
		}
		layers := len(sizes)
		// 1. Check neuron layer count
		if got := len(net.Neurons); got != layers {
			t.Errorf("Expected %d layers of neurons, got %d", layers, got)
			continue
		}
		// 2. Check bias layer count (layers-1)
		if got := len(net.Biases); got != layers-1 {
			t.Errorf("Expected %d layers of biases, got %d", layers-1, got)
			continue
		}
		// 3. Check weights layer count (layers-1)
		if got := len(net.Weights); got != layers-1 {
			t.Errorf("Expected %d layers of weights, got %d", layers-1, got)
			continue
		}
		// 4. Check each neuron layer count
		for l, vec := range net.Neurons {
			if n := vec.ColView(0).Len(); uint(n) != sizes[l] {
				t.Errorf("Expected %d neurons on layer %d, got %d", sizes[l], l, n)
			}
		}
		// 5. Check each bias layer count (biases belong to layer b+1)
		for b, vec := range net.Biases {
			if n := vec.ColView(0).Len(); uint(n) != sizes[b+1] {
				t.Errorf("Expected %d biases on layer %d, got %d", sizes[b+1], b, n)
			}
		}
		// 6. Check each weight matrix dimensions (rows = next layer, cols = previous)
		for w, mat := range net.Weights {
			rows, cols := mat.Dims()
			if uint(rows) != sizes[w+1] { // invalid rows
				t.Errorf("Expected %d weights' rows from layer %d to %d, got %d", sizes[w+1], w, w+1, rows)
			}
			if uint(cols) != sizes[w] { // invalid cols
				t.Errorf("Expected %d weights' cols from layer %d to %d, got %d", sizes[w], w, w+1, cols)
			}
		}
	}
}
// TestForwardPass recomputes every activation by hand after Guess and
// compares it against the network's stored neuron values:
// neuron = sigmoid( sum(previous-neuron * weight) + bias ).
func TestForwardPass(t *testing.T) {
	tests := []struct {
		Layers []uint
		X      []float64
	}{
		{
			[]uint{2, 2, 1},
			[]float64{0.5, 0.2},
		}, {
			[]uint{4, 5, 10},
			[]float64{0.5, 0.2, 0.8, 0.4},
		},
	}
	for _, test := range tests {
		net, err := Empty(test.Layers...)
		if err != nil {
			t.Errorf("Unexpected error <%s>", err)
			continue
		}
		// apply forward pass
		_, err = net.Guess(test.X...)
		if err != nil {
			t.Errorf("Unexpected error <%s>", err)
			continue
		}
		// check layer by layer (begin at 1, layer 0 is the input)
		for l, ll := 1, len(net.layers); l < ll; l++ {
			// each neuron = ( each previous neuron times its weight ) + neuron bias
			for n, nl := 0, net.Neurons[l].ColView(0).Len(); n < nl; n++ {
				sum := net.Biases[l-1].At(n, 0)
				// sum each previous neuron*its weight
				for i, il := 0, net.Neurons[l-1].ColView(0).Len(); i < il; i++ {
					sum += net.Neurons[l-1].At(i, 0) * net.Weights[l-1].At(n, i)
				}
				sum = sigmoid(0, 0, sum)
				// check sum
				// BUG FIX: tolerance was 1e9 (one billion), which made this
				// assertion pass for virtually any value; 1e-9 is intended.
				if !floats.EqualWithinAbs(net.Neurons[l].At(n, 0), sum, 1e-9) {
					t.Fatalf("Expected neuron %d.%d to be %f, got %f", l, n, sum, net.Neurons[l].At(n, 0))
				}
			}
		}
	}
}