4 changes: 1 addition & 3 deletions .gitignore
@@ -31,10 +31,8 @@ test/.coverage
 test/.hypothesis/
 test/cpp/api/mnist
 test/custom_operator/model.pt
-test/data/gpu_tensors.pt
 test/data/legacy_modules.t7
-test/data/legacy_serialized.pt
-test/data/linear.pt
+test/data/*.pt
 dropout_model.pt
 test/generated_type_hints_smoketest.py
 test/htmlcov
21 changes: 21 additions & 0 deletions test/test_nn.py
@@ -1,4 +1,5 @@
 import math
+import sys
 import random
 import string
 import unittest
@@ -579,6 +580,26 @@ def test_module_backcompat(self):
         input = torch.randn(2, 3, dtype=torch.float)
         self.assertEqual(m(input).size(), (2, 5))
 
+    def test_conv_backcompat(self):
+        from torch.serialization import SourceChangeWarning
+        # This file was generated by running on PyTorch 1.0.1 on Python 2:
+        #
+        # import torch
+        # from torch import nn
+        # m = nn.Conv2d(1, 1, 1)
+        # torch.save(m, 'legacy_conv2d.pt')
+        #
+        # NB: This Pickle also contains some Unicode data!
+        path = download_file('https://download.pytorch.org/test_data/legacy_conv2d.pt')
+        with warnings.catch_warnings():
+            warnings.simplefilter('ignore', SourceChangeWarning)
+            if sys.version_info[0] == 2:
+                m = torch.load(path)
+            else:
+                m = torch.load(path, encoding='utf-8')
+        input = torch.randn((1, 1, 1, 1), dtype=torch.float)
+        self.assertEqual(m(input).size(), (1, 1, 1, 1))
+
     def test_share_memory(self):
         class Net(nn.Module):
             def __init__(self):
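The new test hinges on two loading details: SourceChangeWarning is silenced because the module's source has changed since the checkpoint was written, and on Python 3 the load passes an explicit encoding because the legacy Python 2 pickle contains text data that must be decoded. A minimal sketch of the same round trip against a freshly saved checkpoint (the local filename below is an assumption for illustration, not the downloaded legacy fixture the test uses):

import warnings

import torch
from torch import nn
from torch.serialization import SourceChangeWarning

# Save a Conv2d the same way the legacy fixture was generated.
m = nn.Conv2d(1, 1, 1)
torch.save(m, 'conv2d_checkpoint.pt')  # hypothetical local path

# Reload it, ignoring warnings about module source that changed between versions.
with warnings.catch_warnings():
    warnings.simplefilter('ignore', SourceChangeWarning)
    # encoding matters only for pickles written under Python 2; it is harmless here.
    loaded = torch.load('conv2d_checkpoint.pt', encoding='utf-8')

output = loaded(torch.randn(1, 1, 1, 1))
print(output.size())  # torch.Size([1, 1, 1, 1])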
5 changes: 5 additions & 0 deletions torch/nn/modules/conv.py
@@ -68,6 +68,11 @@ def extra_repr(self):
             s += ', bias=False'
         return s.format(**self.__dict__)
 
+    def __setstate__(self, state):
+        super(_ConvNd, self).__setstate__(state)
+        if not hasattr(self, 'padding_mode'):
+            self.padding_mode = 'zeros'
+
 
 @weak_module
 class Conv1d(_ConvNd):
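The __setstate__ hook is the backward-compatibility piece: pickle rebuilds an object by restoring its saved __dict__ and never re-runs __init__, so a Conv module serialized before padding_mode was introduced comes back without that attribute, and the hook backfills the default. A self-contained sketch of the pattern with a toy class (the class and attribute layout below are illustrative, not PyTorch's):

import pickle


class ConvLike:
    # Toy stand-in for _ConvNd.
    def __init__(self):
        self.weight = [1.0]
        self.padding_mode = 'zeros'  # attribute added in a newer release

    def __setstate__(self, state):
        # pickle hands over the saved __dict__; __init__ is never re-run.
        self.__dict__.update(state)
        # Backfill the attribute for objects pickled by older code.
        if not hasattr(self, 'padding_mode'):
            self.padding_mode = 'zeros'


# Simulate a checkpoint written before padding_mode existed.
old = ConvLike.__new__(ConvLike)
old.__dict__ = {'weight': [1.0]}  # no padding_mode in the saved state
restored = pickle.loads(pickle.dumps(old))

print(restored.padding_mode)  # 'zeros', supplied by __setstate__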