commit 8c18220a59f4a1ee1aca905cce32865499333aca
Author:    Tongzhou Wang <SsnL@users.noreply.github.com>  2018-02-26 19:32:08 -0500
Committer: Soumith Chintala <soumith@gmail.com>           2018-02-26 19:32:08 -0500
Tree:      55b03f7d6fbcb491015f5c11a91c191eeedaee8e /docs
Parent:    611c771fc8b9940005c0e2431e6a27495dd0ac5c
Fix layer_norm initialization and nn.Module docs (#5422)
* Fix LN initialization; Support single int normalized_shape
* disable docstring inheritance
* fix sphinx warnings
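
As a side note on the first item: with this fix, `normalized_shape` may be passed as a single int (shorthand for a one-element list covering the last dimension), and the elementwise affine parameters start out as an identity transform. A minimal sketch of that behavior, not part of the patch itself; exact printed values may vary by PyTorch version:

    import torch
    import torch.nn as nn

    # A single int is accepted: normalize over a trailing dimension of size 10.
    ln = nn.LayerNorm(10)               # equivalent to nn.LayerNorm([10])

    # Fixed initialization: weight starts at 1 and bias at 0, so LayerNorm
    # initially applies pure normalization with no affine rescaling.
    assert (ln.weight == 1).all()
    assert (ln.bias == 0).all()

    x = torch.randn(20, 5, 10)
    print(ln(x).shape)                  # torch.Size([20, 5, 10])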
Diffstat (limited to 'docs')
 docs/source/conf.py       | 5 ++++-
 docs/source/nn.rst        | 2 +-
 docs/source/notes/faq.rst | 5 +++--
 3 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 1f93339d33..d19032fba6 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -66,7 +66,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = 'PyTorch'
-copyright = '2017, Torch Contributors'
+copyright = '2018, Torch Contributors'
 author = 'Torch Contributors'
 
 # The version info for the project you're documenting, acts as replacement for
@@ -98,6 +98,9 @@ pygments_style = 'sphinx'
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = True
 
+# Disable docstring inheritance
+autodoc_inherit_docstrings = False
+
 
 # -- Options for HTML output ----------------------------------------------
diff --git a/docs/source/nn.rst b/docs/source/nn.rst
index 5fcd62785a..bb645b88d6 100644
--- a/docs/source/nn.rst
+++ b/docs/source/nn.rst
@@ -950,7 +950,7 @@ Normalization functions
 
 .. autofunction:: batch_norm
 
 :hidden:`instance_norm`
-~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~
 
 .. autofunction:: instance_norm
diff --git a/docs/source/notes/faq.rst b/docs/source/notes/faq.rst
index 1c0ef9c1d4..85917ff34b 100644
--- a/docs/source/notes/faq.rst
+++ b/docs/source/notes/faq.rst
@@ -21,7 +21,7 @@ Sometimes, it can be non-obvious when differentiable variables
 can occur. Consider the following training loop (abridged from `source
 <https://discuss.pytorch.org/t/high-memory-usage-while-training/162>`_):
 
-.. code::
+.. code-block:: python
 
     total_loss = 0
     for i in range(10000):
@@ -52,7 +52,8 @@ you don't need.
 
 The scopes of locals can be larger than you expect. For example:
 
-.. code::
+.. code-block:: python
+
     for i in range(5):
         intermediate = f(input[i])
         result += g(intermediate)
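
The faq.rst passage patched above warns that accumulating a differentiable loss (`total_loss += loss`) keeps every iteration's autograd graph alive. A hedged sketch of the remedy that FAQ recommends, accumulating a plain Python number instead; the model, criterion, and optimizer here are placeholders for illustration, not from this patch:

    import torch

    # Placeholder training setup.
    model = torch.nn.Linear(10, 1)
    criterion = torch.nn.MSELoss()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

    total_loss = 0.0
    for i in range(10000):
        x, y = torch.randn(32, 10), torch.randn(32, 1)
        optimizer.zero_grad()
        loss = criterion(model(x), y)
        loss.backward()
        optimizer.step()
        # Accumulate a float, not the differentiable tensor, so each
        # iteration's autograd graph can be freed immediately.
        total_loss += loss.item()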