author    Luke Yeager <lukeyeager@users.noreply.github.com>  2017-01-27 12:52:21 -0800
committer Adam Paszke <adam.paszke@gmail.com>                2017-01-28 01:15:51 +0100
commit    e7c1e6a8e39df0d206efe247f5eb0481eb8b8b6c (patch)
tree      288961cec717b4e0e88670d07e30c3233b4c0729 /torch/legacy/nn/SoftMin.py
parent    f1d0d73ed72ed6e7208a5c52aa849dca94772666 (diff)
[pep8] Fix most lint automatically with autopep8
Here's the command I used to invoke autopep8 (in parallel!):

    git ls-files | grep '\.py$' | xargs -n1 -P`nproc` autopep8 -i

Several rules are ignored in setup.cfg. The goal is to let autopep8 handle everything which it can handle safely, and to disable any rules which are tricky or controversial to address. We may want to come back and re-enable some of these rules later, but I'm trying to make this patch as safe as possible.

Also configures flake8 to match pep8's behavior.

Also configures TravisCI to check the whole project for lint.
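The setup.cfg changes themselves are not shown on this page. Purely as an illustrative sketch of the kind of configuration the message describes, a pep8/flake8 section in setup.cfg looks something like the following (the rule codes and line length here are placeholders, not the actual values from this commit):

    # Illustrative only -- the rule codes below are placeholders,
    # not the actual list ignored by this commit.
    [pep8]
    max-line-length = 120
    ignore = E402,E741

    # flake8 configured to match pep8's behavior.
    [flake8]
    max-line-length = 120
    ignore = E402,E741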
Diffstat (limited to 'torch/legacy/nn/SoftMin.py')
-rw-r--r--  torch/legacy/nn/SoftMin.py  |  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/torch/legacy/nn/SoftMin.py b/torch/legacy/nn/SoftMin.py
index a6e8737fe7..7c1bbbff3f 100644
--- a/torch/legacy/nn/SoftMin.py
+++ b/torch/legacy/nn/SoftMin.py
@@ -2,6 +2,7 @@ import torch
 from .Module import Module
 from .utils import clear
 
+
 class SoftMin(Module):
 
     def __init__(self):
@@ -10,7 +11,7 @@ class SoftMin(Module):
 
     def updateOutput(self, input):
         if self.mininput is None:
-          self.mininput = input.new()
+            self.mininput = input.new()
         self.mininput.resize_as_(input).copy_(input).mul_(-1)
         self._backend.SoftMax_updateOutput(
             self._backend.library_state,
@@ -21,7 +22,7 @@ class SoftMin(Module):
 
     def updateGradInput(self, input, gradOutput):
         if self.mininput is None:
-          self.mininput = input.new()
+            self.mininput = input.new()
         self.mininput.resize_as_(input).copy_(input).mul_(-1)
         self._backend.SoftMax_updateGradInput(
             self._backend.library_state,
@@ -37,4 +38,3 @@ class SoftMin(Module):
     def clearState(self):
         clear(self, 'mininput')
         return super(SoftMin, self).clearState()
-
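One detail worth noting in the diffed code itself: the legacy module implements SoftMin by negating the input (copy_(input).mul_(-1)) and delegating to the SoftMax backend, i.e. it relies on the identity softmin(x) = softmax(-x). A minimal sketch checking that identity, assuming a modern PyTorch build (torch.nn.functional postdates this legacy wrapper):

    import torch
    import torch.nn.functional as F

    x = torch.randn(4)

    # What the legacy module computes: negate the input, then apply SoftMax.
    via_negated_softmax = F.softmax(-x, dim=0)

    # The built-in softmin should agree.
    print(torch.allclose(via_negated_softmax, F.softmin(x, dim=0)))  # True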