@@ -62,7 +62,7 @@
 
 
 <div class="version">
-  <a href="http://pytorch.org/docs/versions.html">master (0.4.0a0+db6be0e ) ▼</a>
+  <a href="http://pytorch.org/docs/versions.html">master (0.4.0a0+b984c0b ) ▼</a>
 </div>
 
 
@@ -637,7 +637,7 @@ <h1>Source code for torch.functional</h1><div class="highlight"><pre>
 import math
 
 __all__ = [
-    'split', 'chunk', 'stack', 'unbind', 'btriunpack', 'matmul', 'det', 'stft',
+    'split', 'chunk', 'empty_like', 'stack', 'unbind', 'btriunpack', 'matmul', 'det', 'stft',
     'hann_window', 'hamming_window', 'bartlett_window', 'where',
 ]
 
@@ -697,6 +697,26 @@ <h1>Source code for torch.functional</h1><div class="highlight"><pre>
     return split(tensor, split_size, dim)
 
 
+def empty_like(input):
+    r"""empty_like(input) -> Tensor
+
+    Returns an uninitialized tensor with the same size as :attr:`input`.
+
+    Args:
+        input (Tensor): the size of :attr:`input` will determine size of the output tensor
+
+    Example::
+
+        >>> input = torch.LongTensor(2,3)
+        >>> input.new(input.size())
+
+        1.3996e+14  1.3996e+14  1.3996e+14
+        4.0000e+00  0.0000e+00  0.0000e+00
+        [torch.LongTensor of size 2x3]
+    """
+    return input.new(input.size())
+
+
 def stack(sequence, dim=0, out=None):
     r"""Concatenates sequence of tensors along a new dimension.
 
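For readers skimming the diff: below is a minimal usage sketch of the empty_like helper introduced above. It assumes the 0.4-era API shown in the hunk (the helper simply wraps input.new(input.size())) and that torch/__init__.py re-exports it at the top level as torch.empty_like via `from .functional import *`, which is not part of this diff. The key point is that the result matches the input's size and type, but its contents are uninitialized memory.

    import torch

    # Sketch only, assuming the API added in this commit: empty_like
    # allocates a tensor with the same size and type as its argument
    # but does not initialize its contents.
    x = torch.LongTensor(2, 3)
    y = torch.empty_like(x)   # equivalent to x.new(x.size())

    print(y.size())           # torch.Size([2, 3]); values are arbitrary until written
    y.fill_(0)                # write before reading if defined values are needed

Because the memory is not zeroed, empty_like is only a win when every element will be overwritten anyway; otherwise an explicitly initialized tensor is the safer choice.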