
Commit
Fixed float16 error wrt Theano=0.8.2
Sentient07 committed Jul 31, 2017
1 parent 6327a74 commit 5060a2a
Showing 2 changed files with 7 additions and 1 deletion.
lasagne/layers/noise.py (1 addition, 1 deletion)
@@ -81,7 +81,7 @@ def get_output_for(self, input, deterministic=False, **kwargs):
             return input
         else:
             # Using theano constant to prevent upcasting
-            one = T.constant(1)
+            one = T.constant(1, dtype='int8')

             retain_prob = one - self.p
             if self.rescale:
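For context on the one-line change above, here is a minimal sketch of the dtype behaviour it targets. It assumes Theano 0.8.2-era promotion rules; the default dtype chosen by T.constant without an explicit dtype argument can differ between Theano versions, so the exact "loose" dtype below is illustrative only.

# Minimal sketch, not part of the commit; assumes Theano 0.8.2-style promotion.
import numpy
import theano.tensor as T

p = numpy.float16(0.5)

# Without an explicit dtype, the constant may default to a wider integer type,
# and subtracting it can upcast the result away from float16.
loose = T.constant(1) - p

# Forcing int8, as in the fix above, keeps the subtraction in float16
# (int8 and float16 promote to float16 under NumPy-style rules).
tight = T.constant(1, dtype='int8') - p

print(loose.dtype, tight.dtype)  # typically a wider float vs. 'float16'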
lasagne/tests/layers/test_noise.py (6 additions, 0 deletions)
@@ -67,6 +67,12 @@ def test_get_output_for_p_float32(self, input_layer):
         input = theano.shared(numpy.ones((100, 100), dtype=numpy.float32))
         assert layer.get_output_for(input).dtype == input.dtype

+    def test_get_output_for_p_float16(self, input_layer):
+        from lasagne.layers.noise import DropoutLayer
+        layer = DropoutLayer(input_layer, p=numpy.float16(0.5))
+        input = theano.shared(numpy.ones((100, 100), dtype=numpy.float16))
+        assert layer.get_output_for(input).dtype == input.dtype
+
     @pytest.mark.parametrize("shared_axes", [(), (0,), (2, 3), (-1, -2)])
     def test_get_output_for_shared_axes(self, shared_axes):
         from lasagne.layers.noise import DropoutLayer
