From 11ba505252b41bfee8273391a5aed0fadba84f29 Mon Sep 17 00:00:00 2001
From: "Alex J. Champandard"
Date: Tue, 1 Nov 2016 16:04:33 +0100
Subject: [PATCH] Fix for gradient clipping code.

---
 enhance.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/enhance.py b/enhance.py
index 781efd4..9998f89 100755
--- a/enhance.py
+++ b/enhance.py
@@ -428,7 +428,7 @@ class Model(object):
         disc_losses = [self.loss_discriminator(disc_out)]
         disc_params = list(itertools.chain(*[l.get_params() for k, l in self.network.items() if 'disc' in k]))
         print(' - {} tensors learned for discriminator.'.format(len(disc_params)))
-        grads = T.grad(sum(disc_losses, 0.0), disc_params).clip(-1.0, 1.0)
+        grads = [g.clip(-1.0, +1.0) for g in T.grad(sum(disc_losses, 0.0), disc_params)]
         disc_updates = lasagne.updates.adam(grads, disc_params, learning_rate=self.disc_lr)
 
         # Combined Theano function for updating both generator and discriminator at the same time.
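
Note on the fix: when T.grad is given a list of parameters, it returns a Python
list of gradient tensors, and a plain list has no .clip() method, so the old
line raised an AttributeError at graph-construction time. The corrected line
clips each gradient tensor individually before handing the list to
lasagne.updates.adam. Below is a minimal, self-contained sketch of that
pattern against a toy Lasagne model; the layer sizes, loss, and learning rate
are illustrative stand-ins, not the values used in enhance.py.

    import numpy as np
    import theano
    import theano.tensor as T
    import lasagne

    # Hypothetical toy "discriminator": a single dense layer with sigmoid output.
    x = T.matrix('x')
    l_in = lasagne.layers.InputLayer((None, 4), input_var=x)
    l_out = lasagne.layers.DenseLayer(l_in, num_units=1,
                                      nonlinearity=lasagne.nonlinearities.sigmoid)

    output = lasagne.layers.get_output(l_out)
    loss = T.mean(-T.log(output + 1e-8))  # placeholder loss, not the one from enhance.py
    params = lasagne.layers.get_all_params(l_out, trainable=True)

    # T.grad(cost, wrt) returns a *list* of gradient tensors when wrt is a list,
    # so clipping must be applied per tensor; calling .clip() on the list fails.
    grads = [g.clip(-1.0, +1.0) for g in T.grad(loss, params)]
    updates = lasagne.updates.adam(grads, params, learning_rate=1e-4)

    train_fn = theano.function([x], loss, updates=updates)
    print(train_fn(np.random.rand(8, 4).astype(theano.config.floatX)))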