author     Edward Yang <ezyang@fb.com>  2019-03-30 08:58:10 -0700
committer  Facebook Github Bot <facebook-github-bot@users.noreply.github.com>  2019-03-30 09:01:17 -0700
commit     173f224570017b4b1a3a1a13d0bff280a54d9cd9 (patch)
tree       a26ac171d14015f51edbd710f788613d6517f4cf /torch/autograd
parent     96456bfa4cf9394c9c926b143cf724a09901908d (diff)
Turn on F401: Unused import warning. (#18598)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/18598
ghimport-source-id: c74597e5e7437e94a43c163cee0639b20d0d0c6a

Stack from [ghstack](https://github.com/ezyang/ghstack):
* **#18598 Turn on F401: Unused import warning.**

This was requested by someone at Facebook; this lint is turned on for
Facebook by default. "Sure, why not."

I had to noqa a number of imports in __init__. Hypothetically we're
supposed to use __all__ in this case, but I was too lazy to fix it.
Left for future work.

Be careful! flake8-2 and flake8-3 behave differently with respect to
import resolution for # type: comments. flake8-3 will report an import
unused; flake8-2 will not. For now, I just noqa'd all these sites.

All the changes were done by hand.

Signed-off-by: Edward Z. Yang <ezyang@fb.com>

Differential Revision: D14687478

fbshipit-source-id: 30d532381e914091aadfa0d2a5a89404819663e3
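For readers unfamiliar with the two approaches the message contrasts, here is a minimal sketch (the package name `mypkg` is hypothetical): a re-export silenced with `# noqa: F401`, next to the `__all__`-based alternative the message leaves for future work.

```python
# mypkg/__init__.py -- hypothetical package showing both options.

# Option used in this commit: the name is imported only so users can
# write `from mypkg import Function`; the `# noqa: F401` comment tells
# flake8 not to flag the intentional re-export as an unused import.
from .function import Function  # noqa: F401

# Alternative left for future work: list the re-exported names in
# __all__. pyflakes treats names listed in __all__ as used, and the
# list also controls what `from mypkg import *` brings in.
__all__ = ['Function']
```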
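The `# type:` comment caveat is easiest to see in code. In this sketch (module contents are made up), `List` is referenced only inside a PEP 484 type comment; per the message above, flake8 running under Python 3 does not count that as a use and flags the import, while flake8 under Python 2 does not, so the site is noqa'd rather than the import being dropped.

```python
from typing import List  # noqa: F401  # used only in the type comment below


def total(xs):
    # type: (List[int]) -> int
    # flake8-3 reports the `List` import above as unused (F401) because
    # it does not resolve names inside type comments; flake8-2 does not
    # report it. Hence the noqa rather than deleting a needed import.
    return sum(xs)
```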
Diffstat (limited to 'torch/autograd')
-rw-r--r--  torch/autograd/__init__.py              10
-rw-r--r--  torch/autograd/_functions/__init__.py    2
-rw-r--r--  torch/autograd/_functions/utils.py       1
-rw-r--r--  torch/autograd/gradcheck.py              1
-rw-r--r--  torch/autograd/profiler.py               5
5 files changed, 6 insertions, 13 deletions
diff --git a/torch/autograd/__init__.py b/torch/autograd/__init__.py
index 0fe63a877b..f55ed63b4a 100644
--- a/torch/autograd/__init__.py
+++ b/torch/autograd/__init__.py
@@ -8,11 +8,11 @@ import torch
import warnings
from .variable import Variable
-from .function import Function, NestedIOFunction
-from .gradcheck import gradcheck, gradgradcheck
-from .grad_mode import no_grad, enable_grad, set_grad_enabled
-from .anomaly_mode import detect_anomaly, set_detect_anomaly
-from . import profiler
+from .function import Function, NestedIOFunction # noqa: F401
+from .gradcheck import gradcheck, gradgradcheck # noqa: F401
+from .grad_mode import no_grad, enable_grad, set_grad_enabled # noqa: F401
+from .anomaly_mode import detect_anomaly, set_detect_anomaly # noqa: F401
+from . import profiler # noqa: F401
__all__ = ['Variable', 'Function', 'backward', 'grad_mode']
diff --git a/torch/autograd/_functions/__init__.py b/torch/autograd/_functions/__init__.py
index c0417004f8..be419197ea 100644
--- a/torch/autograd/_functions/__init__.py
+++ b/torch/autograd/_functions/__init__.py
@@ -1 +1 @@
-from .tensor import *
+from .tensor import * # noqa: F401
diff --git a/torch/autograd/_functions/utils.py b/torch/autograd/_functions/utils.py
index 55c85e766b..cb571d1934 100644
--- a/torch/autograd/_functions/utils.py
+++ b/torch/autograd/_functions/utils.py
@@ -1,4 +1,3 @@
-import torch
from functools import reduce
diff --git a/torch/autograd/gradcheck.py b/torch/autograd/gradcheck.py
index 22cd8e2691..20ea632831 100644
--- a/torch/autograd/gradcheck.py
+++ b/torch/autograd/gradcheck.py
@@ -1,7 +1,6 @@
import torch
from torch._six import container_abcs, istuple
import torch.testing
-import sys
from itertools import product
import warnings
diff --git a/torch/autograd/profiler.py b/torch/autograd/profiler.py
index 567028666b..1feb171b7c 100644
--- a/torch/autograd/profiler.py
+++ b/torch/autograd/profiler.py
@@ -1,12 +1,7 @@
-import subprocess
-import re
-import os
-import sys
import itertools
from collections import defaultdict, namedtuple
import torch
-from torch._six import FileNotFoundError
class range(object):