
Commit b9f15e8

Generate Python docs from pytorch/pytorch@dfbd030
1 parent 79248fc commit b9f15e8

File tree: 1,659 files changed (+2105 -2105 lines)


docs/1.9.1/__config__.html (+1 -1)

@@ -198,7 +198,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>

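The `1.9.0a0+git<sha>` string changed in these hunks is just the `torch.__version__` of the build that generated the docs; source builds of PyTorch append the git revision to the version string. A minimal check against a local source build (the printed value below is illustrative, not guaranteed):

    import torch

    # Source builds embed the git revision in the version string,
    # e.g. "1.9.0a0+gitdfbd030" for the build behind this commit.
    print(torch.__version__)
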

docs/1.9.1/_modules/index.html (+1 -1)

@@ -197,7 +197,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>


docs/1.9.1/_modules/torch.html (+17 -17)

@@ -197,7 +197,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>

@@ -654,7 +654,7 @@ Source code for torch
     return module + class_name


-<div class="viewcode-block" id="is_tensor"><a class="viewcode-back" href="../generated/torch.is_tensor.html#torch.is_tensor">[docs]</a>def is_tensor(obj):
+def is_tensor(obj):
     r"""Returns True if `obj` is a PyTorch tensor.

     Note that this function is simply doing ``isinstance(obj, Tensor)``.

@@ -671,19 +671,19 @@ Source code for torch
         True

     """
-    return isinstance(obj, torch.Tensor)</div>
+    return isinstance(obj, torch.Tensor)


-<div class="viewcode-block" id="is_storage"><a class="viewcode-back" href="../generated/torch.is_storage.html#torch.is_storage">[docs]</a>def is_storage(obj):
+def is_storage(obj):
     r"""Returns True if `obj` is a PyTorch storage object.

     Args:
         obj (Object): Object to test
     """
-    return type(obj) in _storage_classes</div>
+    return type(obj) in _storage_classes


-def set_default_tensor_type(t):
+<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a>def set_default_tensor_type(t):
     r"""Sets the default ``torch.Tensor`` type to floating point tensor type
     ``t``. This type will also be used as default floating point type for
     type inference in :func:`torch.tensor`.

@@ -704,10 +704,10 @@ Source code for torch
     """
     if isinstance(t, _string_classes):
         t = _import_dotted_name(t)
-    _C._set_default_tensor_type(t)
+    _C._set_default_tensor_type(t)</div>


-def set_default_dtype(d):
+<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a>def set_default_dtype(d):
     r"""Sets the default floating point dtype to :attr:`d`.
     This dtype is:

@@ -735,9 +735,9 @@ Source code for torch
         torch.complex128

     """
-    _C._set_default_dtype(d)
+    _C._set_default_dtype(d)</div>

-def use_deterministic_algorithms(mode):
+<div class="viewcode-block" id="use_deterministic_algorithms"><a class="viewcode-back" href="../generated/torch.use_deterministic_algorithms.html#torch.use_deterministic_algorithms">[docs]</a>def use_deterministic_algorithms(mode):
     r""" Sets whether PyTorch operations must use "deterministic"
     algorithms. That is, algorithms which, given the same input, and when
     run on the same software and hardware, always produce the same output.

@@ -848,7 +848,7 @@ Source code for torch
         ...
         RuntimeError: index_add_cuda_ does not have a deterministic implementation...
     """
-    _C._set_deterministic_algorithms(mode)
+    _C._set_deterministic_algorithms(mode)</div>

 def set_deterministic(d):
     r"""This function is deprecated and will be removed in a future release.

@@ -876,7 +876,7 @@ Source code for torch
     return are_deterministic_algorithms_enabled()


-def set_warn_always(b):
+<div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a>def set_warn_always(b):
     r"""When this flag is False (default) then some PyTorch warnings may only
     appear once per process. This helps avoid excessive warning information.
     Setting it to True causes these warnings to always appear, which may be

@@ -886,13 +886,13 @@ Source code for torch
         b (:class:`bool`): If True, force warnings to always be emitted
             If False, set to the default behaviour
     """
-    _C._set_warnAlways(b)
+    _C._set_warnAlways(b)</div>

-<div class="viewcode-block" id="is_warn_always_enabled"><a class="viewcode-back" href="../generated/torch.is_warn_always_enabled.html#torch.is_warn_always_enabled">[docs]</a>def is_warn_always_enabled():
+def is_warn_always_enabled():
     r"""Returns True if the global warn_always flag is turned on. Refer to
     :func:`torch.set_warn_always` documentation for more details.
     """
-    return _C._get_warnAlways()</div>
+    return _C._get_warnAlways()

 ################################################################################
 # Define Storage and Tensor classes

@@ -1100,9 +1100,9 @@ Source code for torch
 del _torch_docs, _tensor_docs, _storage_docs


-def compiled_with_cxx11_abi():
+<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a>def compiled_with_cxx11_abi():
     r"""Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1"""
-    return _C._GLIBCXX_USE_CXX11_ABI
+    return _C._GLIBCXX_USE_CXX11_ABI</div>


 # Import the ops "namespace"

docs/1.9.1/_modules/torch/__config__.html (+1 -1)

@@ -197,7 +197,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>


docs/1.9.1/_modules/torch/_jit_internal.html (+9 -9)

@@ -197,7 +197,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>

@@ -833,7 +833,7 @@ Source code for torch._jit_internal
     "if this method is not scripted, copy the python method onto the scripted model"


-def export(fn):
+<div class="viewcode-block" id="export"><a class="viewcode-back" href="../../jit.html#torch.jit.export">[docs]</a>def export(fn):
     """
     This decorator indicates that a method on an ``nn.Module`` is used as an entry point into a
     :class:`ScriptModule` and should be compiled.

@@ -876,10 +876,10 @@ Source code for torch._jit_internal
         m = torch.jit.script(MyModule())
     """
     fn._torchscript_modifier = FunctionModifiers.EXPORT
-    return fn
+    return fn</div>


-<div class="viewcode-block" id="unused"><a class="viewcode-back" href="../../generated/torch.jit.unused.html#torch.jit.unused">[docs]</a>def unused(fn):
+def unused(fn):
     """
     This decorator indicates to the compiler that a function or method should
     be ignored and replaced with the raising of an exception. This allows you

@@ -926,7 +926,7 @@ Source code for torch._jit_internal
         return prop

     fn._torchscript_modifier = FunctionModifiers.UNUSED
-    return fn</div>
+    return fn

 # No op context manager from python side
 class _IgnoreContextManager(contextlib.AbstractContextManager):

@@ -936,7 +936,7 @@ Source code for torch._jit_internal
     def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
         pass

-<div class="viewcode-block" id="ignore"><a class="viewcode-back" href="../../generated/torch.jit.ignore.html#torch.jit.ignore">[docs]</a>def ignore(drop=False, **kwargs):
+def ignore(drop=False, **kwargs):
     """
     This decorator indicates to the compiler that a function or method should
     be ignored and left as a Python function. This allows you to leave code in

@@ -1027,7 +1027,7 @@ Source code for torch._jit_internal
         else:
             fn._torchscript_modifier = FunctionModifiers.IGNORE
         return fn
-    return decorator</div>
+    return decorator


 def _copy_to_script_wrapper(fn):

@@ -1266,7 +1266,7 @@ Source code for torch._jit_internal
 globals()[f"BroadcastingList{i}"] = BroadcastingList1


-def is_scripting() -> bool:
+<div class="viewcode-block" id="is_scripting"><a class="viewcode-back" href="../../jit_language_reference.html#torch.jit.is_scripting">[docs]</a>def is_scripting() -> bool:
     r"""
     Function that returns True when in compilation and False otherwise. This
     is useful especially with the @unused decorator to leave code in your

@@ -1285,7 +1285,7 @@ Source code for torch._jit_internal
         else:
             return unsupported_linear_op(x)
     """
-    return False
+    return False</div>


 # Retrieves a fully-qualified name (module hierarchy + classname) for a given obj.

docs/1.9.1/_modules/torch/_lobpcg.html (+1 -1)

@@ -197,7 +197,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>


docs/1.9.1/_modules/torch/_lowrank.html (+1 -1)

@@ -197,7 +197,7 @@
       <div class="pytorch-left-menu-search">

         <div class="version">
-          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gite2cb357 &#x25BC</a>
+          <a href='https://pytorch.org/docs/versions.html'>1.9.0a0+gitdfbd030 &#x25BC</a>
         </div>


Comments (0)