author     Gregory Chanan <gchanan@fb.com>       2017-06-01 09:11:43 -0700
committer  Soumith Chintala <soumith@gmail.com>  2017-06-11 05:37:59 -0400
commit     deec86cc05ed44b790c5ddd668054214d78ec491
tree       3b3002ef724b37e3b8d837521aaa976dfa68651c /tools/cwrap
parent     7da46097fe6145cb211dbe977405d2646eabb270
Clarify a number of comments.
Diffstat (limited to 'tools/cwrap')
-rw-r--r--  tools/cwrap/plugins/Broadcast.py  11
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/tools/cwrap/plugins/Broadcast.py b/tools/cwrap/plugins/Broadcast.py
index da14222763..9bd2a377f4 100644
--- a/tools/cwrap/plugins/Broadcast.py
+++ b/tools/cwrap/plugins/Broadcast.py
@@ -10,10 +10,13 @@ from string import Template
 # argument to be broadcast
 # [fallback] if tensors aren't broadcastable, preserves "element number" pointwise behavior,
 # where only number of elements need to match, and tensors are viewed as 1-dimensional.
-# [dims] if the tensors shouldn't be broadcast to specific tensor or tensors, but a combination
-# of their individual dimensions. Each dimension is specified as [arg].dim[#] and dimensions
-# are comma-separated. So, to specify that the tensor should be broadcast to 3-dimensions with
-# sizes: tensor0->size[0] x tensor1->size[1] x tensor2->size[2], you would write:
+# [dims] specify if the tensors shouldn't be broadcast to a specific tensor or tensors, but a combination
+# of individual dimension sizes of a set of tensors. For example: addbmm(C,A,B) a.k.a. [C + A @ B]
+# broadcasts C to the first dimension of A and the second dimension of B. Each dimension is specified as
+# [arg].dim[#] and dimensions are comma-separated. So, to specify that the tensor should be
+# broadcast to 3-dimensions with sizes:
+# tensor0->size[0] x tensor1->size[1] x tensor2->size[2]
+# you would write:
 # dims:tensor0.dim0,tensor1.dim1,tensor2.dim2
 # [types] if the tensors should be of different types than THTensor, specify as X where
 # the actual type to use is THXTensor (i.e. Byte for THByteTensor). If the type
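For context on the clarified comment, here is a minimal Python sketch of how a spec such as dims:tensor0.dim0,tensor1.dim1 can be read: the broadcast target size is assembled from one dimension of each named tensor. The helper expand_to_dims, its arguments, and the use of torch.Tensor.expand are illustrative assumptions for this page, not the code the cwrap Broadcast plugin actually generates.

```python
import torch

def expand_to_dims(to_expand, spec, tensors):
    # spec is a list of (tensor name, dimension index) pairs, mirroring a
    # cwrap "dims" entry like tensor0.dim0,tensor1.dim1.  The target size is
    # built from those individual dimensions, then `to_expand` is broadcast
    # to it via expand() (hypothetical runtime behavior for illustration).
    target_size = tuple(tensors[name].size(dim) for name, dim in spec)
    return to_expand.expand(*target_size)

# Mirrors the addbmm(C, A, B) example in the comment: C is broadcast to a
# 2-D size taken from A's first dimension and B's second dimension.
A = torch.randn(3, 4)   # stands in for the source of the result's rows
B = torch.randn(4, 5)   # stands in for the source of the result's columns
C = torch.randn(1, 5)   # broadcastable addend
C_expanded = expand_to_dims(C, [("A", 0), ("B", 1)], {"A": A, "B": B})
assert C_expanded.size() == (3, 5)
```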