author    | Luke Yeager <lukeyeager@users.noreply.github.com> | 2017-01-27 12:52:21 -0800
committer | Adam Paszke <adam.paszke@gmail.com> | 2017-01-28 01:15:51 +0100
commit    | e7c1e6a8e39df0d206efe247f5eb0481eb8b8b6c
tree      | 288961cec717b4e0e88670d07e30c3233b4c0729 /torch/_thnn
parent    | f1d0d73ed72ed6e7208a5c52aa849dca94772666
[pep8] Fix most lint automatically with autopep8
Here's the command I used to invoke autopep8 (in parallel!):
git ls-files | grep '\.py$' | xargs -n1 -P`nproc` autopep8 -i
Several rules are ignored in setup.cfg. The goal is to let autopep8
handle everything that it can handle safely, and to disable any rules
that are tricky or controversial to address. We may want to come back
and re-enable some of these rules later, but I'm trying to make this
patch as safe as possible.
Also configures flake8 to match pep8's behavior, and configures
Travis CI to check the whole project for lint.
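For reference, the kind of configuration the message describes would look roughly like the sketch below. This is a hypothetical example: the ignored error codes and the line-length limit are illustrative assumptions, not the exact values committed to PyTorch's setup.cfg. The idea is simply that the fixer section and the [flake8] section share one rule set, so autopep8 and flake8 agree on what is enforced.

# setup.cfg -- hypothetical sketch, not the exact PyTorch configuration
[pep8]
# codes the fixer should leave alone because changing them automatically is risky
ignore = E402,E721,E731
max-line-length = 120

[flake8]
# mirror the settings above so flake8 only reports what the fixer would enforce
ignore = E402,E721,E731
max-line-length = 120

With a shared configuration like this, the Travis CI job would only need to run flake8 over the repository; the rule set itself lives in setup.cfg.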
Diffstat (limited to 'torch/_thnn')
-rw-r--r-- | torch/_thnn/__init__.py | 6
-rw-r--r-- | torch/_thnn/utils.py    | 3
2 files changed, 8 insertions(+), 1 deletion(-)
diff --git a/torch/_thnn/__init__.py b/torch/_thnn/__init__.py
index 97474692eb..dd41e47a27 100644
--- a/torch/_thnn/__init__.py
+++ b/torch/_thnn/__init__.py
@@ -2,7 +2,9 @@ import threading
 import torch.cuda
 from .utils import THNN_H_PATH, THCUNN_H_PATH, parse_header, load_backend
 
+
 class Backends(object):
+
     def __init__(self):
         self.backends = {}
 
@@ -14,6 +16,7 @@ class Backends(object):
 
 
 class Backend(object):
+
     def __init__(self, lib_prefix, lib_name, functions, mixins=tuple()):
         self.lib_prefix = lib_prefix
         self.lib_name = lib_name
@@ -32,11 +35,12 @@ class Backend(object):
         with self.loading_lock:
             if self.backend is None:
                 self.backend = load_backend(self.lib_prefix, self.lib_name,
-                        self.functions, self.mixins)
+                                            self.functions, self.mixins)
         return self.backend
 
 
 class THNNCudaBackendStateMixin(object):
+
     @property
     def library_state(self):
         return torch.cuda._state_cdata
diff --git a/torch/_thnn/utils.py b/torch/_thnn/utils.py
index c62fc2a29e..66d527a704 100644
--- a/torch/_thnn/utils.py
+++ b/torch/_thnn/utils.py
@@ -12,6 +12,7 @@ def _unpickle_backend(backend_name):
 
 
 class THNNBackendBase(object):
+
     def __init__(self):
         self.methods = {}
 
@@ -33,6 +34,7 @@ class THNNBackendBase(object):
 
 
 class Function(object):
+
     def __init__(self, name):
         self.name = name
         self.arguments = []
@@ -46,6 +48,7 @@ class Function(object):
 
 
 class Argument(object):
+
     def __init__(self, _type, name, is_optional):
         self.type = _type
         self.name = name