fix max-grad-norm err in a2c (#46)

Trinkle23897 2020-05-04 12:33:04 +08:00
parent c2a7caf806
commit 04b091d975


@@ -103,7 +103,9 @@ class A2CPolicy(PGPolicy):
                 loss.backward()
                 if self._grad_norm:
                     nn.utils.clip_grad_norm_(
-                        self.model.parameters(), max_norm=self._grad_norm)
+                        list(self.actor.parameters()) +
+                        list(self.critic.parameters()),
+                        max_norm=self._grad_norm)
                 self.optim.step()
                 actor_losses.append(a_loss.item())
                 vf_losses.append(vf_loss.item())
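Below is a minimal standalone sketch of the pattern this fix applies: clipping the global gradient norm over the combined parameters of two separate modules (actor and critic) before stepping a shared optimizer. The Linear networks, loss terms, and the 0.5 clipping value are illustrative placeholders, not tianshou's actual A2C implementation.

    # Sketch only: clip gradients across both modules, not just one of them.
    import torch
    import torch.nn as nn

    actor = nn.Linear(4, 2)    # illustrative stand-ins for the real networks
    critic = nn.Linear(4, 1)
    optim = torch.optim.Adam(
        list(actor.parameters()) + list(critic.parameters()), lr=1e-3)
    max_grad_norm = 0.5        # illustrative value

    obs = torch.randn(8, 4)
    a_loss = actor(obs).pow(2).mean()    # placeholder actor loss
    vf_loss = critic(obs).pow(2).mean()  # placeholder value-function loss
    loss = a_loss + vf_loss

    optim.zero_grad()
    loss.backward()
    # Clip the total norm over all trainable parameters of both networks,
    # mirroring the list(actor) + list(critic) concatenation in the diff.
    nn.utils.clip_grad_norm_(
        list(actor.parameters()) + list(critic.parameters()),
        max_norm=max_grad_norm)
    optim.step()

The point of the change is that clipping only one attribute's parameters leaves the other network's gradients unbounded; concatenating the two parameter lists makes the norm constraint apply to the full set of weights the optimizer updates.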