Diffstat (limited to 'tools')
-rw-r--r--  tools/autograd/templates/Functions.cpp    | 12
-rw-r--r--  tools/autograd/templates/VariableType.cpp | 20
2 files changed, 14 insertions(+), 18 deletions(-)
diff --git a/tools/autograd/templates/Functions.cpp b/tools/autograd/templates/Functions.cpp
index 279f2c3012..aeae0dc792 100644
--- a/tools/autograd/templates/Functions.cpp
+++ b/tools/autograd/templates/Functions.cpp
@@ -36,14 +36,14 @@ struct IndexRangeGenerator {
};
void copy_range(variable_list& out, IndexRange range, const Tensor & t) {
- AT_ASSERT(range.second <= out.size(), "range out of bounds");
- AT_ASSERT(range.second - range.first == 1, "inconsistent range for Tensor output");
+ AT_ASSERT(range.second <= out.size());
+ AT_ASSERTM(range.second - range.first == 1, "inconsistent range for Tensor output");
out[range.first] = t;
}
void copy_range(variable_list& out, IndexRange range, at::ArrayRef<Tensor> t) {
- AT_ASSERT(range.second <= out.size(), "range out of bounds");
- AT_ASSERT(range.second - range.first == t.size(), "inconsistent range for TensorList output");
+ AT_ASSERT(range.second <= out.size());
+ AT_ASSERTM(range.second - range.first == t.size(), "inconsistent range for TensorList output");
std::copy(t.begin(), t.end(), out.begin() + range.first);
}
@@ -972,7 +972,7 @@ Tensor logdet_backward(const Tensor & grad, const Tensor& self, const Tensor& lo
Tensor slogdet_backward(const std::vector<torch::autograd::Variable> &grads,
const Tensor& self,
const Tensor& signdet, const Tensor& logabsdet) {
- AT_ASSERT(!grads[0].defined(), "slogdet's sign output should never have gradient");
+ AT_ASSERTM(!grads[0].defined(), "slogdet's sign output should never have gradient");
auto signdet_val = signdet.toCDouble();
if (signdet_val != 0 /* det != 0, invertible */) {
return grads[1] * self.inverse().t();
@@ -1275,7 +1275,7 @@ std::tuple<Tensor, Tensor, Tensor> batchnorm_double_backward(
if (output_mask[0] && !ggO.defined()) ggO = at::zeros_like(gO);
if (output_mask[1] && !gG.defined()) {
- AT_ASSERT(affine, "gamma should always be defined when it requires grad");
+ AT_ASSERTM(affine, "gamma should always be defined when it requires grad");
gG = at::zeros_like(gamma);
}
if (output_mask[2] && !gI.defined()) gI = at::zeros_like(input);
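
The Functions.cpp hunks above settle on a split between the two assertion macros: AT_ASSERT now takes only a condition, while AT_ASSERTM carries the human-readable failure message. A minimal sketch of that convention, assuming the macros come from ATen's error header (the exact include path may differ between versions):

#include <ATen/core/Error.h>  // assumed header providing AT_ASSERT / AT_ASSERTM

void check_tensor_range(size_t first, size_t second, size_t out_size) {
  // Bare internal invariant: condition only.
  AT_ASSERT(second <= out_size);
  // Invariant with an explanatory message attached for the failure case.
  AT_ASSERTM(second - first == 1, "inconsistent range for Tensor output");
}
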
diff --git a/tools/autograd/templates/VariableType.cpp b/tools/autograd/templates/VariableType.cpp
index 9846170564..6235fc8fa1 100644
--- a/tools/autograd/templates/VariableType.cpp
+++ b/tools/autograd/templates/VariableType.cpp
@@ -157,12 +157,10 @@ std::vector<at::Type*> VariableType::allTypes() {
Variable & VariableType::checked_cast_variable(const Tensor & t, const char * name, int pos) {
if (!t.defined()) {
- AT_ERROR("Expected a Tensor of type Variable but found an undefined Tensor for argument #%d '%s'",
- pos, name);
+ AT_ERROR("Expected a Tensor of type Variable but found an undefined Tensor for argument #", pos, " '", name, "'");
}
if (!isVariableType(t.type())) {
- AT_ERROR("Expected object of type Variable but found type %s for argument #%d '%s'",
- t.type().toString(), pos, name);
+ AT_ERROR("Expected object of type Variable but found type ", t.type().toString(), " for argument #", pos, " '", name, "'");
}
return as_variable_ref(const_cast<Tensor&>(t));
}
@@ -187,14 +185,12 @@ std::vector<at::Tensor> VariableType::unpack(at::TensorList tl, const char *name
for (size_t i = 0; i < tl.size(); ++i) {
const auto &t = tl[i];
if (!t.defined()) {
- AT_ERROR("Expected a Tensor of type Variable but found an undefined Tensor at position #%d "
- "for iterable argument #%d '%s'",
- i, pos, name);
+ AT_ERROR("Expected a Tensor of type Variable but found an undefined Tensor at position #", i, " "
+ "for iterable argument #", pos, " '", name, "'");
}
if (!isVariableType(t.type())) {
- AT_ERROR("Expected object of type Variable but found type %s at position #%d "
- "for iterable argument #%d '%s'",
- t.type().toString(), i, pos, name);
+ AT_ERROR("Expected object of type Variable but found type ", t.type().toString(), " at position #", i, " "
+ "for iterable argument #", pos, " '", name, "'");
}
ret[i] = static_cast<const Variable&>(t).data();
}
@@ -288,8 +284,8 @@ static void check_inplace(const Tensor& tensor) {
static void throw_error_out_requires_grad(const char* name) {
AT_ERROR(
- "%s(): functions with out=... arguments don't support automatic differentiation, "
- "but one of the arguments requires grad.", name);
+ name, "(): functions with out=... arguments don't support automatic differentiation, "
+ "but one of the arguments requires grad.");
}
static void rebase_history(Tensor& tensor, std::shared_ptr<Function> grad_fn) {
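
The VariableType.cpp hunks drop the printf-style format strings and instead pass the message pieces to AT_ERROR as separate arguments, which the macro concatenates. The diff does not show AT_ERROR's implementation; the standalone C++17 sketch below, with hypothetical names build_message and throw_error, only illustrates the variadic string-building idea behind that call style:

#include <sstream>
#include <stdexcept>
#include <string>
#include <utility>

// Hypothetical helper: stream every argument into one message string.
template <typename... Args>
std::string build_message(Args&&... args) {
  std::ostringstream oss;
  (oss << ... << std::forward<Args>(args));  // C++17 fold expression
  return oss.str();
}

// Hypothetical thrower, analogous in spirit to how AT_ERROR is invoked above.
template <typename... Args>
[[noreturn]] void throw_error(Args&&... args) {
  throw std::runtime_error(build_message(std::forward<Args>(args)...));
}

// Usage mirroring the new call style in the diff:
//   throw_error("Expected object of type Variable but found type ",
//               type_str, " for argument #", pos, " '", name, "'");
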