From 5aa53ee7227f5d80567a8e05fa0e75a2621da597 Mon Sep 17 00:00:00 2001
From: marcojob
Date: Tue, 8 Nov 2022 16:12:26 +0100
Subject: [PATCH] Fix issue where batch_invstd is not being set

batch_invstd can be falsely assumed to be set even though it is None,
since hasattr will not return false in this case. In BatchNorm2D a
reshape will then be attempted, which causes an exception.
---
 tinygrad/nn/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tinygrad/nn/__init__.py b/tinygrad/nn/__init__.py
index 7205d3a08a0c..6a5c416373f0 100644
--- a/tinygrad/nn/__init__.py
+++ b/tinygrad/nn/__init__.py
@@ -34,7 +34,7 @@ def __call__(self, x):
     else:
       batch_mean, batch_var = self.running_mean, self.running_var
 
     # NOTE: this can be precomputed for static inference. if you manually update running_var, you have to reset this
-    if not hasattr(self, "batch_invstd"):
+    if not hasattr(self, "batch_invstd") or not self.batch_invstd:
       self.batch_invstd = batch_var.add(self.eps)**-0.5
     batch_invstd = self.batch_invstd
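
A minimal sketch of the failure mode the commit message describes, assuming batch_invstd is assigned None in the constructor (the class name Norm here is hypothetical, not the actual tinygrad class):

    class Norm:
        def __init__(self):
            # assumption: the attribute exists from construction, but holds None
            self.batch_invstd = None

    n = Norm()
    # hasattr is True even though the value was never computed...
    print(hasattr(n, "batch_invstd"))   # True
    # ...so the added "or not self.batch_invstd" check is what catches the None
    # case and triggers (re)computation of the inverse standard deviation
    print(not n.batch_invstd)           # True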