207 | 207 | <div class="pytorch-left-menu-search">
208 | 208 |
209 | 209 | <div class="version">
210 |     | - <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+git4a7e07e ) ▼</a>
    | 210 | + <a href='https://pytorch.org/docs/versions.html'>master (1.12.0a0+git0ca0e02 ) ▼</a>
211 | 211 | </div>
212 | 212 |
213 | 213 |
@@ -496,7 +496,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
496 | 496 | <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">()</span>
497 | 497 |
498 | 498 |
499 |     | -<div class="viewcode-block" id="no_grad"><a class="viewcode-back" href="../../../generated/torch.no_grad.html#torch.no_grad">[docs]</a><span class="k">class</span> <span class="nc">no_grad</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
    | 499 | +<span class="k">class</span> <span class="nc">no_grad</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
500 | 500 | <span class="sa">r</span><span class="sd">"""Context-manager that disabled gradient calculation.</span>
501 | 501 |
502 | 502 | <span class="sd"> Disabling gradient calculation is useful for inference, when you are sure</span>
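
The hunk above drops the `viewcode-block` wrapper around `torch.no_grad`, whose docstring describes disabling gradient calculation for inference. For reference, a minimal usage sketch of that context manager (standard `torch` API, not part of this diff):

import torch

x = torch.ones(3, requires_grad=True)

# As a context manager: ops inside the block don't track gradients.
with torch.no_grad():
    y = x * 2
print(y.requires_grad)   # False

# As a decorator: the whole function body runs with gradients disabled.
@torch.no_grad()
def doubler(t):
    return t * 2

print(doubler(x).requires_grad)  # False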
@@ -540,7 +540,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
540 | 540 | <span class="n">torch</span><span class="o">.</span><span class="n">set_grad_enabled</span><span class="p">(</span><span class="kc">False</span><span class="p">)</span>
541 | 541 |
542 | 542 | <span class="k">def</span> <span class="fm">__exit__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">exc_type</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">exc_value</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">traceback</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-></span> <span class="kc">None</span><span class="p">:</span>
543 |     | - <span class="n">torch</span><span class="o">.</span><span class="n">set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span></div>
    | 543 | + <span class="n">torch</span><span class="o">.</span><span class="n">set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span>
544 | 544 |
545 | 545 |
546 | 546 | <span class="k">class</span> <span class="nc">enable_grad</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
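
The `__exit__` shown above restores the previous grad mode (`self.prev`), which is what lets `torch.enable_grad` re-enable tracking inside a `torch.no_grad` block. A small sketch of that nesting behavior (standard `torch` API, not part of this diff):

import torch

x = torch.tensor([1.0], requires_grad=True)

# enable_grad overrides the surrounding no_grad block; on exit each
# manager restores the mode that was active when it was entered.
with torch.no_grad():
    with torch.enable_grad():
        y = x * 2

print(y.requires_grad)   # True
y.backward()
print(x.grad)            # tensor([2.])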
@@ -586,7 +586,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
586 | 586 | <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span>
587 | 587 |
588 | 588 |
589 |     | -<span class="k">class</span> <span class="nc">set_grad_enabled</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
    | 589 | +<div class="viewcode-block" id="set_grad_enabled"><a class="viewcode-back" href="../../../generated/torch.set_grad_enabled.html#torch.set_grad_enabled">[docs]</a><span class="k">class</span> <span class="nc">set_grad_enabled</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
590 | 590 | <span class="sa">r</span><span class="sd">"""Context-manager that sets gradient calculation to on or off.</span>
591 | 591 |
592 | 592 | <span class="sd"> ``set_grad_enabled`` will enable or disable grads based on its argument :attr:`mode`.</span>
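
This hunk moves the `[docs]` back-link onto `torch.set_grad_enabled`, which toggles gradient tracking based on its boolean `mode` argument. A brief usage sketch (standard `torch` API, not part of this diff):

import torch

x = torch.tensor([1.0], requires_grad=True)
is_train = False

# As a context manager, conditioned on a flag:
with torch.set_grad_enabled(is_train):
    y = x * 2
print(y.requires_grad)   # False

# As a plain call, it flips the global mode until changed again:
torch.set_grad_enabled(False)
y = x * 2
print(y.requires_grad)   # False
torch.set_grad_enabled(True)   # restore the default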
@@ -636,7 +636,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
636 | 636 | <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span>
637 | 637 |
638 | 638 | <span class="k">def</span> <span class="nf">clone</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
639 |     | - <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mode</span><span class="p">)</span>
    | 639 | + <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mode</span><span class="p">)</span></div>
640 | 640 |
641 | 641 |
642 | 642 | <span class="k">class</span> <span class="nc">inference_mode</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
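
The last context line introduces `torch.inference_mode`, another `_DecoratorContextManager` subclass whose body isn't shown in this diff. The sketch below only illustrates the usual usage pattern (standard `torch` API in recent releases, not part of this diff):

import torch

x = torch.ones(2, 3, requires_grad=True)

# Like no_grad, but created tensors are marked as inference tensors,
# which can make inference-only code a bit faster.
with torch.inference_mode():
    y = x * x
print(y.requires_grad)   # False

# It also works as a decorator, like the other subclasses above.
@torch.inference_mode()
def forward_only(t):
    return t * t

print(forward_only(x).requires_grad)  # False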