author     Edward Yang <ezyang@fb.com>    2019-03-30 08:58:10 -0700
committer  Facebook Github Bot <facebook-github-bot@users.noreply.github.com>    2019-03-30 09:01:17 -0700
commit     173f224570017b4b1a3a1a13d0bff280a54d9cd9 (patch)
tree       a26ac171d14015f51edbd710f788613d6517f4cf /torch/utils
parent     96456bfa4cf9394c9c926b143cf724a09901908d (diff)
Turn on F401: Unused import warning. (#18598)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/18598
ghimport-source-id: c74597e5e7437e94a43c163cee0639b20d0d0c6a
Stack from [ghstack](https://github.com/ezyang/ghstack):
* **#18598 Turn on F401: Unused import warning.**
This was requested by someone at Facebook; this lint is turned
on for Facebook by default. "Sure, why not."
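For readers unfamiliar with the check: F401 is flake8's "imported but unused" warning. A minimal standalone illustration (not taken from this patch):

```python
import os   # flake8 reports: F401 'os' imported but unused
import sys

print(sys.version)  # 'sys' is referenced, so it is not flagged
```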
I had to noqa a number of imports in __init__. Hypothetically
we're supposed to use __all__ in this case, but I was too lazy
to fix it. Left for future work.
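As a sketch of that __all__ route, using torch/utils/data/__init__.py as the example (hypothetical; this patch keeps the noqa comments instead), pyflakes treats names listed in __all__ as used, so the re-exports would not need per-line suppressions:

```python
from .sampler import (Sampler, SequentialSampler, RandomSampler,
                      SubsetRandomSampler, WeightedRandomSampler, BatchSampler)
from .distributed import DistributedSampler
from .dataset import Dataset, TensorDataset, ConcatDataset, Subset, random_split
from .dataloader import DataLoader

# Listing the re-exported names in __all__ marks them as intentionally public,
# so flake8 should not report F401 for the imports above.
__all__ = ['Sampler', 'SequentialSampler', 'RandomSampler', 'SubsetRandomSampler',
           'WeightedRandomSampler', 'BatchSampler', 'DistributedSampler',
           'Dataset', 'TensorDataset', 'ConcatDataset', 'Subset', 'random_split',
           'DataLoader']
```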
Be careful! flake8-2 and flake8-3 behave differently with
respect to import resolution for # type: comments. flake8-3 will
report such an import as unused; flake8-2 will not. For now, I just
noqa'd all these sites.
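A hypothetical example of the kind of site being described (the function and names below are made up for illustration): the import is referenced only inside a # type: comment, so whether F401 fires depends on which flake8 is doing the checking, and the noqa keeps both versions quiet.

```python
from typing import List  # noqa: F401  (only used in the type comment below)


def total_width(columns):
    # type: (List[int]) -> int
    # Whether the `List` reference above counts as a use of the import depends
    # on whether the checker understands type comments, hence the noqa.
    return sum(columns)
```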
All the changes were done by hand.
Signed-off-by: Edward Z. Yang <ezyang@fb.com>
Differential Revision: D14687478
fbshipit-source-id: 30d532381e914091aadfa0d2a5a89404819663e3
Diffstat (limited to 'torch/utils')
-rw-r--r--  torch/utils/bottleneck/__main__.py          | 3
-rw-r--r--  torch/utils/collect_env.py                  | 2
-rw-r--r--  torch/utils/data/__init__.py                | 8
-rw-r--r--  torch/utils/data/_utils/__init__.py         | 2
-rw-r--r--  torch/utils/data/_utils/pin_memory.py       | 2
-rw-r--r--  torch/utils/data/_utils/signal_handling.py  | 4
6 files changed, 7 insertions, 14 deletions
diff --git a/torch/utils/bottleneck/__main__.py b/torch/utils/bottleneck/__main__.py
index ae5de6b9da..0cca3c4b04 100644
--- a/torch/utils/bottleneck/__main__.py
+++ b/torch/utils/bottleneck/__main__.py
@@ -1,11 +1,8 @@
 import argparse
 import cProfile
 import pstats
-import subprocess
 import sys
 import os
-import re
-import contextlib
 
 import torch
 from torch.autograd import profiler
diff --git a/torch/utils/collect_env.py b/torch/utils/collect_env.py
index 9c28124074..2c3f380895 100644
--- a/torch/utils/collect_env.py
+++ b/torch/utils/collect_env.py
@@ -4,8 +4,6 @@ from __future__ import absolute_import, division, print_function, unicode_literals
 import re
 import subprocess
 import sys
-import time
-import datetime
 import os
 from collections import namedtuple
diff --git a/torch/utils/data/__init__.py b/torch/utils/data/__init__.py
index ee58707ba3..1852aca404 100644
--- a/torch/utils/data/__init__.py
+++ b/torch/utils/data/__init__.py
@@ -1,4 +1,4 @@
-from .sampler import Sampler, SequentialSampler, RandomSampler, SubsetRandomSampler, WeightedRandomSampler, BatchSampler
-from .distributed import DistributedSampler
-from .dataset import Dataset, TensorDataset, ConcatDataset, Subset, random_split
-from .dataloader import DataLoader
+from .sampler import Sampler, SequentialSampler, RandomSampler, SubsetRandomSampler, WeightedRandomSampler, BatchSampler  # noqa: F401
+from .distributed import DistributedSampler  # noqa: F401
+from .dataset import Dataset, TensorDataset, ConcatDataset, Subset, random_split  # noqa: F401
+from .dataloader import DataLoader  # noqa: F401
diff --git a/torch/utils/data/_utils/__init__.py b/torch/utils/data/_utils/__init__.py
index 05b2b654c6..893b716a0b 100644
--- a/torch/utils/data/_utils/__init__.py
+++ b/torch/utils/data/_utils/__init__.py
@@ -58,4 +58,4 @@ def _set_python_exit_flag():
 
 atexit.register(_set_python_exit_flag)
 
-from . import worker, signal_handling, pin_memory, collate
+from . import worker, signal_handling, pin_memory, collate  # noqa: F401
diff --git a/torch/utils/data/_utils/pin_memory.py b/torch/utils/data/_utils/pin_memory.py
index f762aff315..979044e7a4 100644
--- a/torch/utils/data/_utils/pin_memory.py
+++ b/torch/utils/data/_utils/pin_memory.py
@@ -7,7 +7,7 @@ static methods.
 
 import torch
 from torch._six import queue, container_abcs, string_classes
-from . import collate, MP_STATUS_CHECK_INTERVAL, ExceptionWrapper
+from . import MP_STATUS_CHECK_INTERVAL, ExceptionWrapper
 
 
 def _pin_memory_loop(in_queue, out_queue, device_id, done_event):
diff --git a/torch/utils/data/_utils/signal_handling.py b/torch/utils/data/_utils/signal_handling.py
index 9364733d92..3f8a90e214 100644
--- a/torch/utils/data/_utils/signal_handling.py
+++ b/torch/utils/data/_utils/signal_handling.py
@@ -32,9 +32,7 @@ multiprocessing data loading robust to errors.
 
 import signal
 import threading
-import torch
-from torch._C import _set_worker_pids, _remove_worker_pids, \
-    _error_if_any_worker_fails, _set_worker_signal_handlers
+from torch._C import _set_worker_pids, _remove_worker_pids, _error_if_any_worker_fails, _set_worker_signal_handlers  # noqa: F401
 from . import IS_WINDOWS