
Commit 83be1bf

Generate Python docs from pytorch/pytorch@539076e
1 parent 470c534 commit 83be1bf

File tree

2,027 files changed (+3,680 additions, −6,291 deletions)


docs/master/_images/RReLU.png

Binary file changed (−44 bytes)

docs/master/_modules/index.html

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

docs/master/_modules/torch.html

Lines changed: 9 additions & 3 deletions
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

@@ -1340,7 +1340,8 @@ <h1>Source code for torch</h1>
 # buffer = z
 # return min - torch.log1p(z), buffer
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ <--- HERE
-if (os.environ.get("PYTORCH_JIT", "1") == "1" and
+# Currently broken for 3.11, see https://github.com/pytorch/pytorch/issues/85506
+if (os.environ.get("PYTORCH_JIT", "1" if sys.version_info < (3, 11) else "0") == "1" and
         __debug__ and
         not torch._C._is_deploy_enabled() and
         os.environ.get('PYTORCH_DISABLE_LIBRARY', "0") == "0"):

@@ -1384,7 +1385,12 @@ <h1>Source code for torch</h1>
 from . import _masked

 # Import removed ops with error message about removal
-from ._linalg_utils import eig, solve
+from ._linalg_utils import (  # type: ignore[misc]
+    matrix_rank,
+    eig,
+    solve,
+    lstsq,
+)


 def _register_device_module(device_type, module):

docs/master/_modules/torch/__config__.html

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

docs/master/_modules/torch/_jit_internal.html

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

docs/master/_modules/torch/_lobpcg.html

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

docs/master/_modules/torch/_lowrank.html

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

docs/master/_modules/torch/_tensor.html

Lines changed: 6 additions & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

@@ -1067,6 +1067,11 @@ <h1>Source code for torch._tensor</h1>

         return solve(self, other)

+    def lstsq(self, other):
+        from ._linalg_utils import lstsq
+
+        return lstsq(self, other)
+
     def eig(self, eigenvectors=False):
         from ._linalg_utils import eig
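
The added Tensor.lstsq follows the same pattern as the existing eig and solve shims: it forwards to torch._linalg_utils, whose entries for removed ops (per the "Import removed ops with error message about removal" comment in the torch/__init__ diff above) exist only to raise an informative error. A rough sketch of that pattern with hypothetical message text; the exact wording in _linalg_utils may differ:

def lstsq(input, A, *, out=None):
    # Hypothetical stand-in for the removed-op shim: it never computes
    # anything and only points callers at the replacement API.
    raise RuntimeError(
        "torch.lstsq has been removed; please use torch.linalg.lstsq instead."
    )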

docs/master/_modules/torch/_tensor_str.html

Lines changed: 46 additions & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>

@@ -430,6 +430,7 @@

 <h1>Source code for torch._tensor_str</h1>
 import math
+import textwrap
 from typing import Optional

 import torch

@@ -795,6 +796,8 @@ <h1>Source code for torch._tensor_str</h1>


 def _str_intern(inp, *, tensor_contents=None):
+    if torch._C._functorch.is_functorch_wrapped_tensor(inp):
+        return _functorch_wrapper_str_intern(inp, tensor_contents=tensor_contents)
     is_plain_tensor = type(inp) is torch.Tensor or type(inp) is torch.nn.Parameter
     if inp.is_nested:
         prefix = "nested_tensor("

@@ -1028,6 +1031,48 @@ <h1>Source code for torch._tensor_str</h1>
     return string_repr


+def _functorch_wrapper_str_intern(tensor, *, tensor_contents=None):
+    level = torch._C._functorch.maybe_get_level(tensor)
+    assert level != -1
+
+    if torch._C._functorch.is_functionaltensor(tensor):
+        # Since we're unwrapping the FunctionalTensorWrapper, we need to make sure
+        # that it's up to date first
+        torch._sync(tensor)
+
+    value = torch._C._functorch.get_unwrapped(tensor)
+    dl_enabled = torch._C._are_functorch_transforms_active()
+    try:
+        # Disable temporarily FuncTorchDynamicLayerFrontMode and
+        # FuncTorchDynamicLayerBackMode as included dispatch keys
+        if dl_enabled:
+            torch._C._functorch._set_dynamic_layer_keys_included(False)
+        value_repr = repr(value)
+    finally:
+        # Reenable FuncTorchDynamicLayerFrontMode and
+        # FuncTorchDynamicLayerBackMode as included dispatch keys
+        if dl_enabled:
+            torch._C._functorch._set_dynamic_layer_keys_included(True)
+
+    indented_value_repr = textwrap.indent(value_repr, " " * 4)
+    if torch._C._functorch.is_batchedtensor(tensor):
+        bdim = torch._C._functorch.maybe_get_bdim(tensor)
+        assert bdim != -1
+        return (
+            f"BatchedTensor(lvl={level}, bdim={bdim}, value=\n"
+            f"{indented_value_repr}\n"
+            f")"
+        )
+    if torch._C._functorch.is_gradtrackingtensor(tensor):
+        return (
+            f"GradTrackingTensor(lvl={level}, value=\n" f"{indented_value_repr}\n" f")"
+        )
+    if torch._C._functorch.is_functionaltensor(tensor):
+        return f"FunctionalTensor(lvl={level}, value=\\\n{value_repr})"
+
+    raise ValueError("We don't know how to print this, please file us an issue")
+
+
 def _str(self, *, tensor_contents=None):
     with torch.no_grad():
         return _str_intern(self, tensor_contents=tensor_contents)

docs/master/_modules/torch/_utils.html

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@
   <div class="pytorch-left-menu-search">

     <div class="version">
-      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git1910c58 ) &#x25BC</a>
+      <a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git539076e ) &#x25BC</a>
     </div>
