Add initializer compatibility test (#67)
* Add initializer test

* Fix initializers in Theano

* Tweak layer and optimizer test

* Add version print
mthrok authored Oct 27, 2016
1 parent 04612e0 commit 8dfa9b0
Showing 32 changed files with 355 additions and 85 deletions.
1 change: 1 addition & 0 deletions circle.yml
@@ -35,6 +35,7 @@ test:
     - LUCHADOR_NN_BACKEND=tensorflow LUCHADOR_NN_CONV_FORMAT=NHWC coverage run --source luchador -a setup.py test
     # Integration tests
     - ./tests/integration/run_serialization_tests.sh
+    - ./tests/integration/run_initializer_compatibility_test.sh
     - ./tests/integration/run_layer_numerical_compatibility_tests.sh
     - ./tests/integration/run_optimizer_numerical_compatibility_tests.sh
     - LUCHADOR_NN_BACKEND=theano LUCHADOR_NN_CONV_FORMAT=NCHW ./tests/integration/run_dqn.sh
6 changes: 3 additions & 3 deletions luchador/nn/core/__init__.py
@@ -6,9 +6,9 @@
 
 from .base import * # noqa: F401, F403
 
-logging.getLogger(__name__).info(
-    'Using %s backend', luchador.get_nn_backend()
-)
+_LG = logging.getLogger(__name__)
+_LG.info('Luchador Version: %s', luchador.__version__)
+_LG.info('Luchador NN backend: %s', luchador.get_nn_backend())
 
 if luchador.get_nn_backend() == 'tensorflow':
     from .tensorflow import * # noqa: F401, F403
47 changes: 25 additions & 22 deletions luchador/nn/core/theano/initializer.py
@@ -7,6 +7,7 @@
 
 import numpy as np
 from numpy.random import RandomState
+from scipy.stats import truncnorm as tnorm
 
 from theano import config
 
@@ -64,31 +65,24 @@ class Xavier(InitializerMixin, base_initializer.BaseXavier):
     def _sample(self, shape):
         if not len(shape) == 2:
             raise ValueError(
-                'Xavier initializer expects the shape to have 2 elements, '
-                'e.g. [fan_in, fan_out]. Found: {}'.format(shape)
+                'Xavier initializer expects the shape to have 2 elements. '
+                'Found: {}'.format(shape)
             )
 
-        fan_in, fan_out = shape
-        param = self._compute_param(fan_in, fan_out)
-        return self._sample_value(shape, param)
-
-    def _compute_param(self, fan_in, fan_out):
+        fan_out, fan_in = shape[0], shape[1]
+        scale = np.sqrt(6. / (fan_in + fan_out))
         if self.args['uniform']:
-            x = np.sqrt(6. / (fan_in + fan_out))
-            return {'low': -x, 'high': x}
+            value = self._sample_uniform(scale, shape)
         else:
-            scale = np.sqrt(3. / (fan_in + fan_out))
-            return {'loc': 0., 'scale': scale}
+            value = self._sample_truncated_normal(scale, shape)
+        return value.astype(self.args['dtype'] or config.floatX)
 
-    def _sample_value(self, shape, param):
-        if self.args['uniform']:
-            values = self._rng.uniform(
-                low=param['low'], high=param['high'], size=shape)
-        else:
-            values = self._rng.normal(
-                loc=param['loc'], scale=param['scale'], size=shape)
-        dtype = self.args['dtype'] or config.floatX
-        return values.astype(dtype)
+    def _sample_uniform(self, scale, shape):
+        return self._rng.uniform(low=-scale, high=scale, size=shape)
+
+    def _sample_truncated_normal(self, scale, shape):
+        return tnorm.rvs(
+            -1, 1, scale=scale, size=shape, random_state=self._rng)
 
 
 class XavierConv2D(Xavier):
@@ -97,9 +91,18 @@ class XavierConv2D(Xavier):
     See :any:`BaseXavierConv2D` for detail.
     """
     def _sample(self, shape):
+        if not len(shape) == 4:
+            raise ValueError(
+                'Xavier conv2d initializer expects the shape with 4 elements. '
+                'Found: {}'.format(shape)
+            )
         # theano's filter shape is
         # (output_channels, input_channels, filter_rows, filter_columns)
         fan_in = shape[1] * shape[2] * shape[3]
         fan_out = shape[0] * shape[2] * shape[3]
-        param = self._compute_param(fan_in, fan_out)
-        return self._sample_value(shape, param)
+        scale = np.sqrt(6. / (fan_in + fan_out))
+        if self.args['uniform']:
+            value = self._sample_uniform(scale, shape)
+        else:
+            value = self._sample_truncated_normal(scale, shape)
+        return value.astype(self.args['dtype'] or config.floatX)
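For reference, the new sampling scheme reduces to the following self-contained sketch (same math as the diff above; `xavier_sample` is an illustrative name, not a luchador API): uniform values on [-scale, scale], or normal values with standard deviation `scale` truncated at one standard deviation.

import numpy as np
from numpy.random import RandomState
from scipy.stats import truncnorm as tnorm

def xavier_sample(shape, uniform=True, seed=123):
    # scale = sqrt(6 / (fan_in + fan_out)), as in both branches above
    fan_out, fan_in = shape[0], shape[1]
    scale = np.sqrt(6. / (fan_in + fan_out))
    rng = RandomState(seed)
    if uniform:
        return rng.uniform(low=-scale, high=scale, size=shape)
    # normal distribution with std `scale`, truncated at +/- 1 std
    return tnorm.rvs(-1, 1, scale=scale, size=shape, random_state=rng)

print(xavier_sample((1000, 100), uniform=False).std())  # ~0.0398, cf. xavier_normal.yml below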
4 changes: 4 additions & 0 deletions tests/integration/README.md
@@ -4,6 +4,10 @@ This directory contains the list of integration tests.
 
   This test builds and runs DQN against ALEEnvironment so as to verify that it is not broken.
 
+* `run_initializer_compatibility_test.sh`
+
+  This test runs initializers and checks that the resulting distributions are correct.
+
 * `run_layer_numerical_compatibility_tests.sh`
 
   This test compares the outputs from a fixed layer configuration/parameter and input so as to ensure layers' behavior is the same across backends.
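For orientation, the comparison the initializer test performs presumably amounts to something like the sketch below; the arguments mirror the `compare_config` entries in the yml files that follow, but `check_distribution` is a hypothetical helper, not the actual test code.

import numpy as np

def check_distribution(values, expected_mean, expected_std, threshold):
    # `values` is a sample drawn from the configured initializer with the
    # shape from test_config; its empirical mean and std must match
    # compare_config within threshold.
    mean_ok = abs(np.mean(values) - expected_mean) <= threshold
    std_ok = abs(np.std(values) - expected_std) <= threshold
    return mean_ok and std_ok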
12 changes: 12 additions & 0 deletions tests/integration/data/initializer/constant.yml
@@ -0,0 +1,12 @@
+initializer:
+  name: Constant
+  args:
+    value: 3.2
+
+test_config:
+  shape: [4, 3, 2, 1]
+
+compare_config:
+  threshold: !!float 1e-4
+  mean: 3.2
+  std: 0
13 changes: 13 additions & 0 deletions tests/integration/data/initializer/normal.yml
@@ -0,0 +1,13 @@
+initializer:
+  name: Normal
+  args:
+    mean: &mean 5.3
+    stddev: &stddev 9.0
+
+test_config:
+  shape: [32, 16, 4, 4]
+
+compare_config:
+  mean: *mean
+  std: *stddev
+  threshold: 0.05
14 changes: 14 additions & 0 deletions tests/integration/data/initializer/uniform.yml
@@ -0,0 +1,14 @@
+initializer:
+  name: Uniform
+  args:
+    minval: -2.0
+    maxval: 6.0
+
+test_config:
+  shape: [16, 16, 8, 8]
+
+compare_config:
+  threshold: 0.03
+  mean: 2.0
+  std: 2.309 # (maxval - minval) / sqrt(12)
+
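As a quick check of the numbers above: a uniform distribution on [minval, maxval] has mean (minval + maxval) / 2 and standard deviation (maxval - minval) / sqrt(12).

import numpy as np

minval, maxval = -2.0, 6.0
print((minval + maxval) / 2)            # 2.0
print((maxval - minval) / np.sqrt(12))  # 2.3094...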
20 changes: 20 additions & 0 deletions tests/integration/data/initializer/xavier_conv2d_normal.yml
@@ -0,0 +1,20 @@
+initializer:
+  name: XavierConv2D
+  args:
+    uniform: False
+
+test_config:
+  shape: [64, 32, 8, 8]
+
+compare_config:
+  threshold: 0.1
+  mean: 0.0
+  std: 0.0168
+  # Standard deviation here is not that of the normal distribution, but of
+  # the normal distribution truncated at +/- `scale` (cf. `tnorm.rvs(-1, 1, ...)` in the initializer).
+  # To get this you can use the scipy.stats.truncnorm class:
+  #
+  # fan_out, fan_in = 64 * 8 * 8, 32 * 8 * 8
+  # scale = np.sqrt(6. / (fan_in + fan_out))
+  # variance = truncnorm.stats(-1, 1, scale=scale, moments='v')
+  # std = np.sqrt(variance)
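The expected value 0.0168 can be reproduced with scipy, following the comment above:

import numpy as np
from scipy.stats import truncnorm

fan_out, fan_in = 64 * 8 * 8, 32 * 8 * 8
scale = np.sqrt(6. / (fan_in + fan_out))  # 0.03125
# variance of a normal with std `scale`, truncated at +/- 1 std
variance = truncnorm.stats(-1, 1, scale=scale, moments='v')
print(np.sqrt(variance))  # 0.01686...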
19 changes: 19 additions & 0 deletions tests/integration/data/initializer/xavier_conv2d_uniform.yml
@@ -0,0 +1,19 @@
+initializer:
+  name: XavierConv2D
+  args:
+    uniform: True
+
+test_config:
+  shape: [64, 32, 8, 8]
+
+compare_config:
+  threshold: 0.03
+  mean: 0.0
+  std: 0.01804
+  # Standard deviation here is that of a uniform distribution over
+  # [-scale, scale], i.e. (scale + scale) / sqrt(12), not of a
+  # truncated normal distribution.
+  #
+  # fan_out, fan_in = 64 * 8 * 8, 32 * 8 * 8
+  # scale = np.sqrt(6. / (fan_in + fan_out))
+  # std = (scale + scale) / sqrt(12)
20 changes: 20 additions & 0 deletions tests/integration/data/initializer/xavier_normal.yml
@@ -0,0 +1,20 @@
+initializer:
+  name: Xavier
+  args:
+    uniform: False
+
+test_config:
+  shape: [1000, 100]
+
+compare_config:
+  threshold: 0.1
+  mean: 0.0
+  std: 0.03984
+  # Standard deviation here is not that of the normal distribution, but of
+  # the normal distribution truncated at +/- `scale` (cf. `tnorm.rvs(-1, 1, ...)` in the initializer).
+  # To get this you can use the scipy.stats.truncnorm class:
+  #
+  # fan_out, fan_in = 1000, 100
+  # scale = np.sqrt(6. / (fan_in + fan_out))
+  # variance = truncnorm.stats(-1, 1, scale=scale, moments='v')
+  # std = np.sqrt(variance)
19 changes: 19 additions & 0 deletions tests/integration/data/initializer/xavier_uniform.yml
@@ -0,0 +1,19 @@
+initializer:
+  name: Xavier
+  args:
+    uniform: True
+
+test_config:
+  shape: [64, 32]
+
+compare_config:
+  threshold: 0.03
+  mean: 0.0
+  std: 0.1443
+  # Standard deviation here is that of a uniform distribution over
+  # [-scale, scale], i.e. (scale + scale) / sqrt(12), not of a
+  # truncated normal distribution.
+  #
+  # fan_out, fan_in = 64, 32
+  # scale = np.sqrt(6. / (fan_in + fan_out))
+  # std = (scale + scale) / sqrt(12)
2 changes: 0 additions & 2 deletions tests/integration/data/layer/batch_normalization_2d_learn.yml
@@ -1,4 +1,3 @@
-
 run:
   iteration: 10
 
@@ -13,4 +12,3 @@ layer:
 input: input_randn_3x5_offset_3.h5
 
 parameter: parameter_bn.h5
-
@@ -1,4 +1,3 @@
-
 run:
   iteration: 10
 
@@ -13,4 +12,3 @@ layer:
 input: input_randn_3x5_offset_3.h5
 
 parameter: parameter_bn.h5
-
3 changes: 0 additions & 3 deletions tests/integration/data/layer/batch_normalization_4d_learn.yml
@@ -1,4 +1,3 @@
-
 run:
   iteration: 10
 
@@ -10,8 +9,6 @@ layer:
     learn: True
     decay: 0.999
 
-
 input: input_randn_3x5_offset_3.h5
 
 parameter: parameter_bn.h5
-
@@ -1,4 +1,3 @@
-
 run:
   iteration: 10
 
@@ -13,4 +12,3 @@ layer:
 input: input_randn_3x5_offset_3.h5
 
 parameter: parameter_bn.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/conv2d_same.yml
@@ -1,4 +1,3 @@
-
 layer:
   name: Conv2D
   args:
@@ -12,4 +11,3 @@ layer:
 input: input_mnist_10x4x28x27.h5
 
 parameter: parameter_randn_3x4x7x5.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/conv2d_valid.yml
@@ -1,4 +1,3 @@
-
 layer:
   name: Conv2D
   args:
@@ -12,4 +11,3 @@ layer:
 input: input_mnist_10x4x28x27.h5
 
 parameter: parameter_randn_3x4x7x5.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/conv2d_without_bias.yml
@@ -1,4 +1,3 @@
-
 layer:
   name: Conv2D
   args:
@@ -12,4 +11,3 @@ layer:
 input: input_mnist_10x4x28x27.h5
 
 parameter: parameter_randn_3x4x7x5.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/dense.yml
@@ -1,4 +1,3 @@
-
 layer:
   name: Dense
   args:
@@ -8,4 +7,3 @@ layer:
 input: input_randn_5x3.h5
 
 parameter: parameter_randn_3x7.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/dense_without_bias.yml
@@ -1,4 +1,3 @@
-
 layer:
   name: Dense
   args:
@@ -8,4 +7,3 @@ layer:
 input: input_randn_5x3.h5
 
 parameter: parameter_randn_3x7.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/flatten.yml
@@ -1,7 +1,5 @@
-
 layer:
   name: Flatten
   args: {}
 
 input: input_mnist_10x4x28x27.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/relu.yml
@@ -1,7 +1,5 @@
-
 layer:
   name: ReLU
   args: {}
 
 input: input_randn_5x3.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/sigmoid.yml
@@ -1,7 +1,5 @@
-
 layer:
   name: Sigmoid
   args: {}
 
 input: input_randn_5x3.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/softmax.yml
@@ -1,7 +1,5 @@
-
 layer:
   name: Softmax
   args: {}
 
 input: input_randn_5x3.h5
-
2 changes: 0 additions & 2 deletions tests/integration/data/layer/true_div.yml
@@ -1,8 +1,6 @@
-
 layer:
   name: TrueDiv
   args:
     denom: 255
 
 input: input_randint_1x3x5x7.h5
-
18 changes: 18 additions & 0 deletions tests/integration/run_initializer_compatibility_test.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -u
+
+BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+DATA_DIR="${BASE_DIR}/data/initializer"
+TEST_DIR="${BASE_DIR}/test_initializer_compatibility"
+TEST_COMMAND="${TEST_DIR}/test_initializer_compatibility.sh"
+
+RETURN=0
+for FILE in ${DATA_DIR}/*.yml
+do
+    "${TEST_COMMAND}" "${FILE}"
+    if [[ ! $? = 0 ]]; then
+        RETURN=1
+    fi
+done
+
+exit ${RETURN}