Skip to content

Commit caa9714

Browse files
committed
Generate Python docs from pytorch/pytorch@4bb7e14
1 parent aa11b49 commit caa9714

File tree

2,001 files changed

+4195
-2426
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

2,001 files changed

+4195
-2426
lines changed

docs/master/_images/RReLU.png

-32 Bytes
Loading

docs/master/_modules/index.html

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

@@ -441,6 +441,7 @@ <h1>All modules for which code is available</h1>
441441
<li><a href="torch/_lowrank.html">torch._lowrank</a></li>
442442
<li><a href="torch/_tensor.html">torch._tensor</a></li>
443443
<li><a href="torch/_tensor_str.html">torch._tensor_str</a></li>
444+
<li><a href="torch/_utils.html">torch._utils</a></li>
444445
<li><a href="torch/_vmap_internals.html">torch._vmap_internals</a></li>
445446
<li><a href="torch/amp/autocast_mode.html">torch.amp.autocast_mode</a></li>
446447
<li><a href="torch/ao/ns/_numeric_suite.html">torch.ao.ns._numeric_suite</a></li>

docs/master/_modules/torch.html

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

@@ -467,7 +467,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
467467
<span class="s1">&#39;no_grad&#39;</span><span class="p">,</span> <span class="s1">&#39;enable_grad&#39;</span><span class="p">,</span> <span class="s1">&#39;rand&#39;</span><span class="p">,</span> <span class="s1">&#39;randn&#39;</span><span class="p">,</span> <span class="s1">&#39;inference_mode&#39;</span><span class="p">,</span>
468468
<span class="s1">&#39;DoubleStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;FloatStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;LongStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;IntStorage&#39;</span><span class="p">,</span>
469469
<span class="s1">&#39;ShortStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;CharStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;ByteStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;BoolStorage&#39;</span><span class="p">,</span>
470-
<span class="s1">&#39;_TypedStorage&#39;</span><span class="p">,</span>
470+
<span class="s1">&#39;TypedStorage&#39;</span><span class="p">,</span> <span class="s1">&#39;UntypedStorage&#39;</span><span class="p">,</span>
471471
<span class="s1">&#39;DoubleTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;FloatTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;LongTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;IntTensor&#39;</span><span class="p">,</span>
472472
<span class="s1">&#39;ShortTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;CharTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;ByteTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;BoolTensor&#39;</span><span class="p">,</span> <span class="s1">&#39;Tensor&#39;</span><span class="p">,</span>
473473
<span class="s1">&#39;lobpcg&#39;</span><span class="p">,</span> <span class="s1">&#39;use_deterministic_algorithms&#39;</span><span class="p">,</span>
@@ -1083,10 +1083,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
10831083
<span class="c1">################################################################################</span>
10841084

10851085
<span class="kn">from</span> <span class="nn">._tensor</span> <span class="kn">import</span> <span class="n">Tensor</span>
1086-
<span class="kn">from</span> <span class="nn">.storage</span> <span class="kn">import</span> <span class="n">_StorageBase</span><span class="p">,</span> <span class="n">_TypedStorage</span><span class="p">,</span> <span class="n">_LegacyStorage</span><span class="p">,</span> <span class="n">_UntypedStorage</span>
1086+
<span class="kn">from</span> <span class="nn">.storage</span> <span class="kn">import</span> <span class="n">_StorageBase</span><span class="p">,</span> <span class="n">TypedStorage</span><span class="p">,</span> <span class="n">_LegacyStorage</span><span class="p">,</span> <span class="n">UntypedStorage</span>
10871087

10881088
<span class="c1"># NOTE: New &lt;type&gt;Storage classes should never be added. When adding a new</span>
1089-
<span class="c1"># dtype, use torch.storage._TypedStorage directly.</span>
1089+
<span class="c1"># dtype, use torch.storage.TypedStorage directly.</span>
10901090

10911091
<div class="viewcode-block" id="ByteStorage"><a class="viewcode-back" href="../storage.html#torch.ByteStorage">[docs]</a><span class="k">class</span> <span class="nc">ByteStorage</span><span class="p">(</span><span class="n">_LegacyStorage</span><span class="p">):</span>
10921092
<div class="viewcode-block" id="ByteStorage.dtype"><a class="viewcode-back" href="../storage.html#torch.ByteStorage.dtype">[docs]</a> <span class="nd">@classproperty</span>
@@ -1174,11 +1174,11 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
11741174
<span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">quint2x4</span></div></div>
11751175

11761176
<span class="n">_storage_classes</span> <span class="o">=</span> <span class="p">{</span>
1177-
<span class="n">_UntypedStorage</span><span class="p">,</span> <span class="n">DoubleStorage</span><span class="p">,</span> <span class="n">FloatStorage</span><span class="p">,</span> <span class="n">LongStorage</span><span class="p">,</span> <span class="n">IntStorage</span><span class="p">,</span>
1177+
<span class="n">UntypedStorage</span><span class="p">,</span> <span class="n">DoubleStorage</span><span class="p">,</span> <span class="n">FloatStorage</span><span class="p">,</span> <span class="n">LongStorage</span><span class="p">,</span> <span class="n">IntStorage</span><span class="p">,</span>
11781178
<span class="n">ShortStorage</span><span class="p">,</span> <span class="n">CharStorage</span><span class="p">,</span> <span class="n">ByteStorage</span><span class="p">,</span> <span class="n">HalfStorage</span><span class="p">,</span> <span class="n">BoolStorage</span><span class="p">,</span>
11791179
<span class="n">QUInt8Storage</span><span class="p">,</span> <span class="n">QInt8Storage</span><span class="p">,</span> <span class="n">QInt32Storage</span><span class="p">,</span> <span class="n">BFloat16Storage</span><span class="p">,</span>
11801180
<span class="n">ComplexFloatStorage</span><span class="p">,</span> <span class="n">ComplexDoubleStorage</span><span class="p">,</span> <span class="n">QUInt4x2Storage</span><span class="p">,</span> <span class="n">QUInt2x4Storage</span><span class="p">,</span>
1181-
<span class="n">_TypedStorage</span>
1181+
<span class="n">TypedStorage</span>
11821182
<span class="p">}</span>
11831183

11841184
<span class="c1"># The _tensor_classes set is initialized by the call to _C._initialize_tensor_type_bindings()</span>

docs/master/_modules/torch/__config__.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

docs/master/_modules/torch/_jit_internal.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

docs/master/_modules/torch/_lobpcg.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

docs/master/_modules/torch/_lowrank.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

docs/master/_modules/torch/_tensor.html

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

@@ -587,10 +587,10 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
587587
<span class="sa">f</span><span class="s2">&quot;Unsupported qscheme </span><span class="si">{</span><span class="bp">self</span><span class="o">.</span><span class="n">qscheme</span><span class="p">()</span><span class="si">}</span><span class="s2"> in deepcopy&quot;</span>
588588
<span class="p">)</span>
589589
<span class="c1"># TODO: Once we decide to break serialization FC, no longer</span>
590-
<span class="c1"># need to wrap with _TypedStorage</span>
590+
<span class="c1"># need to wrap with TypedStorage</span>
591591
<span class="n">new_tensor</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_utils</span><span class="o">.</span><span class="n">_rebuild_qtensor</span><span class="p">(</span>
592-
<span class="n">torch</span><span class="o">.</span><span class="n">storage</span><span class="o">.</span><span class="n">_TypedStorage</span><span class="p">(</span>
593-
<span class="n">wrap_storage</span><span class="o">=</span><span class="n">new_storage</span><span class="o">.</span><span class="n">_untyped</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
592+
<span class="n">torch</span><span class="o">.</span><span class="n">storage</span><span class="o">.</span><span class="n">TypedStorage</span><span class="p">(</span>
593+
<span class="n">wrap_storage</span><span class="o">=</span><span class="n">new_storage</span><span class="o">.</span><span class="n">untyped</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
594594
<span class="p">),</span>
595595
<span class="bp">self</span><span class="o">.</span><span class="n">storage_offset</span><span class="p">(),</span>
596596
<span class="bp">self</span><span class="o">.</span><span class="n">size</span><span class="p">(),</span>
@@ -684,7 +684,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
684684
<span class="k">if</span> <span class="n">has_torch_function_unary</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
685685
<span class="k">return</span> <span class="n">handle_torch_function</span><span class="p">(</span><span class="n">Tensor</span><span class="o">.</span><span class="n">storage</span><span class="p">,</span> <span class="p">(</span><span class="bp">self</span><span class="p">,),</span> <span class="bp">self</span><span class="p">)</span>
686686

687-
<span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">_TypedStorage</span><span class="p">(</span><span class="n">wrap_storage</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">_storage</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span></div>
687+
<span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">TypedStorage</span><span class="p">(</span><span class="n">wrap_storage</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">_storage</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span></div>
688688

689689
<span class="k">def</span> <span class="nf">_reduce_ex_internal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">proto</span><span class="p">):</span>
690690
<span class="n">check_serializing_named_tensor</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
@@ -753,10 +753,10 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
753753
<span class="sa">f</span><span class="s2">&quot;Serialization is not supported for tensors of type </span><span class="si">{</span><span class="bp">self</span><span class="o">.</span><span class="n">qscheme</span><span class="p">()</span><span class="si">}</span><span class="s2">&quot;</span>
754754
<span class="p">)</span>
755755
<span class="c1"># TODO: Once we decide to break serialization FC, no longer</span>
756-
<span class="c1"># need to wrap with _TypedStorage</span>
756+
<span class="c1"># need to wrap with TypedStorage</span>
757757
<span class="n">args_qtensor</span> <span class="o">=</span> <span class="p">(</span>
758-
<span class="n">torch</span><span class="o">.</span><span class="n">storage</span><span class="o">.</span><span class="n">_TypedStorage</span><span class="p">(</span>
759-
<span class="n">wrap_storage</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">storage</span><span class="p">()</span><span class="o">.</span><span class="n">_untyped</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
758+
<span class="n">torch</span><span class="o">.</span><span class="n">storage</span><span class="o">.</span><span class="n">TypedStorage</span><span class="p">(</span>
759+
<span class="n">wrap_storage</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">storage</span><span class="p">()</span><span class="o">.</span><span class="n">untyped</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
760760
<span class="p">),</span>
761761
<span class="bp">self</span><span class="o">.</span><span class="n">storage_offset</span><span class="p">(),</span>
762762
<span class="nb">tuple</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">size</span><span class="p">()),</span>
@@ -811,10 +811,10 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
811811
<span class="k">return</span> <span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">_utils</span><span class="o">.</span><span class="n">_rebuild_wrapper_subclass</span><span class="p">,</span> <span class="n">arg_wrapper_subclass</span><span class="p">)</span>
812812
<span class="k">else</span><span class="p">:</span>
813813
<span class="c1"># TODO: Once we decide to break serialization FC, no longer</span>
814-
<span class="c1"># need to wrap with _TypedStorage</span>
814+
<span class="c1"># need to wrap with TypedStorage</span>
815815
<span class="n">args</span> <span class="o">=</span> <span class="p">(</span>
816-
<span class="n">torch</span><span class="o">.</span><span class="n">storage</span><span class="o">.</span><span class="n">_TypedStorage</span><span class="p">(</span>
817-
<span class="n">wrap_storage</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">storage</span><span class="p">()</span><span class="o">.</span><span class="n">_untyped</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
816+
<span class="n">torch</span><span class="o">.</span><span class="n">storage</span><span class="o">.</span><span class="n">TypedStorage</span><span class="p">(</span>
817+
<span class="n">wrap_storage</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">storage</span><span class="p">()</span><span class="o">.</span><span class="n">untyped</span><span class="p">(),</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
818818
<span class="p">),</span>
819819
<span class="bp">self</span><span class="o">.</span><span class="n">storage_offset</span><span class="p">(),</span>
820820
<span class="nb">tuple</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">size</span><span class="p">()),</span>

docs/master/_modules/torch/_tensor_str.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@
219219
<div class="pytorch-left-menu-search">
220220

221221
<div class="version">
222-
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git0b25664 ) &#x25BC</a>
222+
<a href='https://pytorch.org/docs/versions.html'>master (1.13.0a0+git4bb7e14 ) &#x25BC</a>
223223
</div>
224224

225225

0 commit comments

Comments (0)