From a76098ac1532d5e9ee24b4776258ae731627f8e3 Mon Sep 17 00:00:00 2001
From: Yan Wang
Date: Sat, 3 Jun 2017 15:00:11 +0800
Subject: fix optimizer when given a single parameter (instead of an iterable)

When I use named_parameters to modify the lr and weight decay, I hit a bug,
because the value returned by named_parameters is a single
torch.nn.parameter.Parameter, not an iterable of Parameters.
---
 torch/optim/optimizer.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/torch/optim/optimizer.py b/torch/optim/optimizer.py
index ebeddd1129..12285e913b 100644
--- a/torch/optim/optimizer.py
+++ b/torch/optim/optimizer.py
@@ -33,7 +33,10 @@ class Optimizer(object):
         param_set = set()
         for group in self.param_groups:
-            group['params'] = list(group['params'])
+            if isinstance(group['params'], torch.autograd.Variable):
+                group['params'] = [group['params']]
+            else:
+                group['params'] = list(group['params'])
             group_set = set(group['params'])
             if not param_set.isdisjoint(group_set):
                 raise ValueError("some parameters appear in more than one "
--
cgit v1.2.3
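
A minimal sketch of the usage pattern the commit message describes, assuming a
toy nn.Linear model and made-up hyperparameter values (neither is part of the
patch): one param group is built per named parameter, so each group's 'params'
entry is a single Parameter rather than an iterable, which is the case the new
isinstance() branch wraps in a one-element list.

    # Sketch only: the model and the per-group weight_decay values are
    # illustrative assumptions, not taken from the patch.
    import torch.nn as nn
    import torch.optim as optim

    model = nn.Linear(4, 2)

    # named_parameters() yields (name, Parameter) pairs, so 'params' below
    # holds a single Parameter, not a list or generator of Parameters.
    param_groups = [
        {'params': p,
         'weight_decay': 1e-4 if name == 'weight' else 0.0}
        for name, p in model.named_parameters()
    ]

    # Before the fix, Optimizer.__init__ called list(group['params']) on the
    # bare Parameter, iterating over the tensor itself instead of wrapping it
    # in a one-element list.
    optimizer = optim.SGD(param_groups, lr=0.01)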