8 changes: 8 additions & 0 deletions torch/optim/optimizer.py
@@ -11,6 +11,11 @@
 class Optimizer(object):
     """Base class for all optimizers.
 
+    .. warning::
+        Parameters need to be specified as collections that have a deterministic
+        ordering that is consistent between runs. Examples of objects that don't
+        satisfy those properties are sets and iterators over values of dictionaries.
+
     Arguments:
         params (iterable): an iterable of :class:`Variable` s or
             :class:`dict` s. Specifies what Variables should be optimized.
@@ -175,6 +180,9 @@ def add_param_group(self, param_group):
         params = param_group['params']
         if isinstance(params, Variable):
             param_group['params'] = [params]
+        elif isinstance(params, set):
+            raise TypeError('optimizer parameters need to be organized in ordered collections, but '
+                            'the ordering of tensors in sets will change between runs. Please use a list instead.')
         else:
             param_group['params'] = list(params)
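
For context, here is a minimal sketch of what this check accepts and rejects, assuming a PyTorch build that includes this patch; the parameter names (w, b, c) are illustrative only:

    import torch
    from torch import optim

    # Illustrative parameters; the names are hypothetical.
    w = torch.nn.Parameter(torch.randn(2, 2))
    b = torch.nn.Parameter(torch.randn(2))
    c = torch.nn.Parameter(torch.randn(2))

    # A list has a deterministic ordering that is consistent between runs,
    # so it is accepted.
    opt = optim.SGD([w, b], lr=0.1)

    # A set does not, and with this patch add_param_group() rejects it.
    try:
        opt.add_param_group({'params': {c}})
    except TypeError as err:
        print(err)  # optimizer parameters need to be organized in ordered collections, ...

Note that the new elif only catches sets; an iterator over dictionary values, which the docstring warning also names, would still be silently materialized by list(params), so the docstring warning remains the broader guidance.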
