
Commit fcb5bb8

Generate Python docs from pytorch/pytorch@567520f

1 parent: 01ee424

File tree: 830 files changed (+2453, -910 lines)


docs/master/__config__.html (+1, -1)

@@ -188,7 +188,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/index.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch.html (+25, -6)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

@@ -420,7 +420,8 @@ <h1>Source code for torch</h1>
     'DoubleTensor', 'FloatTensor', 'LongTensor', 'IntTensor',
     'ShortTensor', 'CharTensor', 'ByteTensor', 'BoolTensor', 'Tensor',
     'lobpcg', 'use_deterministic_algorithms', 'set_deterministic',
-    'are_deterministic_algorithms_enabled', 'is_deterministic'
+    'are_deterministic_algorithms_enabled', 'is_deterministic',
+    'set_warn_always', 'is_warn_always_enabled',
 ]

 ################################################################################

@@ -803,11 +804,11 @@ <h1>Source code for torch</h1>
     use_deterministic_algorithms(d)

-def are_deterministic_algorithms_enabled():
+<div class="viewcode-block" id="are_deterministic_algorithms_enabled"><a class="viewcode-back" href="../generated/torch.are_deterministic_algorithms_enabled.html#torch.are_deterministic_algorithms_enabled">[docs]</a>def are_deterministic_algorithms_enabled():
     r"""Returns True if the global deterministic flag is turned on. Refer to
     :func:`torch.use_deterministic_algorithms` documentation for more details.
     """
-    return _C._get_deterministic_algorithms()
+    return _C._get_deterministic_algorithms()</div>

 def is_deterministic():
     r"""This function is deprecated and will be removed in a future release.

@@ -819,6 +820,24 @@ <h1>Source code for torch</h1>
     return are_deterministic_algorithms_enabled()

+<div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a>def set_warn_always(b):
+    r"""When this flag is False (default) then some PyTorch warnings may only
+    appear once per process. This helps avoid excessive warning information.
+    Setting it to True causes these warnings to always appear, which may be
+    helpful when debugging.
+
+    Args:
+        b (:class:`bool`): If True, force warnings to always be emitted
+            If False, set to the default behaviour
+    """
+    _C._set_warnAlways(b)</div>
+
+<div class="viewcode-block" id="is_warn_always_enabled"><a class="viewcode-back" href="../generated/torch.is_warn_always_enabled.html#torch.is_warn_always_enabled">[docs]</a>def is_warn_always_enabled():
+    r"""Returns True if the global warn_always flag is turned on. Refer to
+    :func:`torch.set_warn_always` documentation for more details.
+    """
+    return _C._get_warnAlways()</div>
+
 ################################################################################
 # Define Storage and Tensor classes
 ################################################################################

@@ -1015,9 +1034,9 @@ <h1>Source code for torch</h1>
 del _torch_docs, _tensor_docs, _storage_docs

-<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a>def compiled_with_cxx11_abi():
+def compiled_with_cxx11_abi():
     r"""Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1"""
-    return _C._GLIBCXX_USE_CXX11_ABI</div>
+    return _C._GLIBCXX_USE_CXX11_ABI

 # Import the ops "namespace"

docs/master/_modules/torch/__config__.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/_jit_internal.html (+2, -2)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

@@ -395,11 +395,11 @@ <h1>Source code for torch._jit_internal</h1>
 from textwrap import dedent
 import torch
 import sys
+import builtins
 # This is needed. `torch._jit_internal` is imported before `torch.distributed.__init__`.
 # Explicitly ask to import `torch.distributed.__init__` first.
 # Otherwise, "AttributeError: module 'torch' has no attribute 'distributed'" is raised.
 import torch.distributed.rpc
-from torch._six import builtins
 from torch._utils_internal import get_source_lines_and_file
 from torch.futures import Future
 from typing import Tuple, List, Dict, Optional, Union, Any, TypeVar, Generic, Callable  # noqa: F401

docs/master/_modules/torch/_lobpcg.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/_lowrank.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/_tensor_str.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/_utils.html (+1, -2)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

@@ -380,7 +380,6 @@
 <h1>Source code for torch._utils</h1>
 import torch
-import torch._six
 from typing import Optional, List, DefaultDict
 import warnings
 from collections import defaultdict

docs/master/_modules/torch/_vmap_internals.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/autograd.html (+6, -7)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

@@ -607,12 +607,11 @@ <h1>Source code for torch.autograd</h1>
 # This function applies in case of gradient checkpointing for memory
-# optimization. Currently, for gradient checkpointing, we only support imperative
-# backwards call i.e. torch.autograd.backward() and the torch.autograd.grad() won't
-# work. The reason being that: torch.autograd.grad() only calculates the grads
-# for the inputs that are passed by user but it doesn't calculate grad for
-# anything else e.g. model parameters like weights, bias etc. However, for
-# torch.autograd.backward(), we would actually compute the grad for the weights as well.
+# optimization. Currently, gradient checkpointing is supported only if the
+# execution engine is invoked through torch.autograd.backward() and its
+# inputs argument is not passed. It is not supported for torch.autograd.grad().
+# This is because if inputs are specified, the gradient won't be calculated for
+# anything else e.g. model parameters like weights, bias etc.
 #
 # This function returns whether the checkpointing is valid i.e. torch.autograd.backward
 # or not i.e. torch.autograd.grad. The implementation works by maintaining a thread

docs/master/_modules/torch/autograd/anomaly_mode.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/autograd/function.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/autograd/functional.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>

docs/master/_modules/torch/autograd/grad_mode.html (+1, -1)

@@ -187,7 +187,7 @@
 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
 </div>
