@@ -192,7 +192,7 @@
 <div class="pytorch-left-menu-search">

   <div class="version">
-    <a href='https://pytorch.org/docs/versions.html'>master (1.9.0a0+gitf3a9779) ▼</a>
+    <a href='https://pytorch.org/docs/versions.html'>master (1.9.0a0+gitf8f3346) ▼</a>
   </div>

@@ -685,7 +685,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span>


-<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
+<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets the default ``torch.Tensor`` type to floating point tensor type</span>
     <span class="sd">``t``. This type will also be used as default floating point type for</span>
     <span class="sd">type inference in :func:`torch.tensor`.</span>
@@ -706,10 +706,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="sd">"""</span>
     <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
         <span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>


-<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets the default floating point dtype to :attr:`d`.</span>
     <span class="sd">This dtype is:</span>
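A short sketch of `torch.set_default_dtype`, the second function this hunk wraps in a viewcode block; the complex inference shown matches the `torch.complex128` line in the docstring excerpted in the next hunk:

```python
import torch

torch.set_default_dtype(torch.float64)
# Python floats are now inferred as float64 ...
print(torch.tensor([1.2, 3.0]).dtype)    # torch.float64
# ... and Python complex numbers as complex128.
print(torch.tensor([1 + 2j]).dtype)      # torch.complex128

torch.set_default_dtype(torch.float32)   # restore the default
```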
@@ -737,9 +737,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="sd">torch.complex128</span>

     <span class="sd">"""</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>

-<span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">):</span>
+<div class="viewcode-block" id="use_deterministic_algorithms"><a class="viewcode-back" href="../generated/torch.use_deterministic_algorithms.html#torch.use_deterministic_algorithms">[docs]</a><span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets whether PyTorch operations must use "deterministic"</span>
     <span class="sd">algorithms. That is, algorithms which, given the same input, and when</span>
     <span class="sd">run on the same software and hardware, always produce the same output.</span>
@@ -850,7 +850,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="sd">...</span>
     <span class="sd">RuntimeError: index_add_cuda_ does not have a deterministic implementation...</span>
     <span class="sd">"""</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">)</span>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">)</span></div>

 <span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""This function is deprecated and will be removed in a future release.</span>
@@ -878,7 +878,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="k">return</span> <span class="n">are_deterministic_algorithms_enabled</span><span class="p">()</span>


-<span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
+<div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a><span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""When this flag is False (default) then some PyTorch warnings may only</span>
     <span class="sd">appear once per process. This helps avoid excessive warning information.</span>
     <span class="sd">Setting it to True causes these warnings to always appear, which may be</span>
@@ -888,7 +888,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="sd">b (:class:`bool`): If True, force warnings to always be emitted</span>
     <span class="sd">If False, set to the default behaviour</span>
     <span class="sd">"""</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span></div>

 <span class="k">def</span> <span class="nf">is_warn_always_enabled</span><span class="p">():</span>
     <span class="sa">r</span><span class="sd">"""Returns True if the global warn_always flag is turned on. Refer to</span>