author    Will Feng <yf225@cornell.edu>      2018-02-17 23:49:12 -0500
committer Edward Z. Yang <ezyang@mit.edu>    2018-02-17 20:49:12 -0800
commit    9193dfd185ac526d0d71c6fd100a52b0b2e10e58 (patch)
tree      26105ef803c97d502b04b6ebd22cee7cfdb6a230
parent    c71c84ee044de7fc7a9b337995f6864ec92cbcd8 (diff)
Disable test_multi_drop on Windows (#5290)
 test/test_utils.py | 2 ++
 1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/test/test_utils.py b/test/test_utils.py
index 816c019396..32c6f00c56 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -18,6 +18,7 @@ from torch.utils.trainer.plugins.plugin import Plugin
 from torch.utils.serialization import load_lua
 from torch.autograd._functions.utils import prepare_onnx_paddings
 from torch.autograd._functions.utils import check_onnx_broadcast
+from common import IS_WINDOWS
 
 HAS_CUDA = torch.cuda.is_available()
 
@@ -139,6 +140,7 @@ class TestDataLoader(TestCase):
         dataiter = iter(dataloader)
         self.assertEqual(len(list(dataiter)), 2)
 
+    @unittest.skipIf(IS_WINDOWS, "FIXME: Intermittent CUDA out-of-memory error")
     def test_multi_drop(self):
         dataloader = torch.utils.data.DataLoader(self.dataset,
                                                  batch_size=self.batch_size,
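
For reference, unittest.skipIf(condition, reason) marks a test to be skipped whenever the condition is true, so the added decorator turns test_multi_drop into a skip on Windows while leaving it active on other platforms. Below is a minimal, self-contained sketch of that pattern; the ExampleTest class is a hypothetical stand-in for TestDataLoader, and IS_WINDOWS is assumed to reduce to a sys.platform check like the one in PyTorch's test common module:

    import sys
    import unittest

    # Assumption: the IS_WINDOWS flag imported from `common` boils down
    # to a platform check along these lines.
    IS_WINDOWS = sys.platform == "win32"

    class ExampleTest(unittest.TestCase):  # hypothetical stand-in for TestDataLoader
        @unittest.skipIf(IS_WINDOWS, "FIXME: Intermittent CUDA out-of-memory error")
        def test_multi_drop(self):
            # On Windows this body never runs; the runner reports the test
            # as skipped, with the reason string shown in its output.
            self.assertTrue(True)

    if __name__ == "__main__":
        unittest.main()

On non-Windows platforms the decorator is inert and the test executes normally, which is why a conditional skip is preferred here over deleting or commenting out the test.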