@@ -187,7 +187,7 @@


<div class="version">
- <a href='http://pytorch.org/docs/versions.html'>1.8.0a0+71e7186 ▼</a>
+ <a href='http://pytorch.org/docs/versions.html'>1.8.0a0+567520f ▼</a>
</div>

@@ -420,7 +420,8 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="s1">'DoubleTensor'</span><span class="p">,</span> <span class="s1">'FloatTensor'</span><span class="p">,</span> <span class="s1">'LongTensor'</span><span class="p">,</span> <span class="s1">'IntTensor'</span><span class="p">,</span>
<span class="s1">'ShortTensor'</span><span class="p">,</span> <span class="s1">'CharTensor'</span><span class="p">,</span> <span class="s1">'ByteTensor'</span><span class="p">,</span> <span class="s1">'BoolTensor'</span><span class="p">,</span> <span class="s1">'Tensor'</span><span class="p">,</span>
<span class="s1">'lobpcg'</span><span class="p">,</span> <span class="s1">'use_deterministic_algorithms'</span><span class="p">,</span> <span class="s1">'set_deterministic'</span><span class="p">,</span>
- <span class="s1">'are_deterministic_algorithms_enabled'</span><span class="p">,</span> <span class="s1">'is_deterministic'</span>
+ <span class="s1">'are_deterministic_algorithms_enabled'</span><span class="p">,</span> <span class="s1">'is_deterministic'</span><span class="p">,</span>
+ <span class="s1">'set_warn_always'</span><span class="p">,</span> <span class="s1">'is_warn_always_enabled'</span><span class="p">,</span>
<span class="p">]</span>

<span class="c1">################################################################################</span>
@@ -803,11 +804,11 @@ <h1>Source code for torch</h1><div class="highlight"><pre>

<span class="n">use_deterministic_algorithms</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>

- <span class="k">def</span> <span class="nf">are_deterministic_algorithms_enabled</span><span class="p">():</span>
+ <div class="viewcode-block" id="are_deterministic_algorithms_enabled"><a class="viewcode-back" href="../generated/torch.are_deterministic_algorithms_enabled.html#torch.are_deterministic_algorithms_enabled">[docs]</a><span class="k">def</span> <span class="nf">are_deterministic_algorithms_enabled</span><span class="p">():</span>
<span class="sa">r</span><span class="sd">"""Returns True if the global deterministic flag is turned on. Refer to</span>
<span class="sd">:func:`torch.use_deterministic_algorithms` documentation for more details.</span>
<span class="sd">"""</span>
- <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_deterministic_algorithms</span><span class="p">()</span>
+ <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_deterministic_algorithms</span><span class="p">()</span></div>

<span class="k">def</span> <span class="nf">is_deterministic</span><span class="p">():</span>
<span class="sa">r</span><span class="sd">"""This function is deprecated and will be removed in a future release.</span>
@@ -819,6 +820,24 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="k">return</span> <span class="n">are_deterministic_algorithms_enabled</span><span class="p">()</span>


+ <div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a><span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
+ <span class="sa">r</span><span class="sd">"""When this flag is False (default) then some PyTorch warnings may only</span>
+ <span class="sd">appear once per process. This helps avoid excessive warning information.</span>
+ <span class="sd">Setting it to True causes these warnings to always appear, which may be</span>
+ <span class="sd">helpful when debugging.</span>
+
+ <span class="sd">Args:</span>
+ <span class="sd">b (:class:`bool`): If True, force warnings to always be emitted</span>
+ <span class="sd">If False, set to the default behaviour</span>
+ <span class="sd">"""</span>
+ <span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span></div>
+
+ <div class="viewcode-block" id="is_warn_always_enabled"><a class="viewcode-back" href="../generated/torch.is_warn_always_enabled.html#torch.is_warn_always_enabled">[docs]</a><span class="k">def</span> <span class="nf">is_warn_always_enabled</span><span class="p">():</span>
+ <span class="sa">r</span><span class="sd">"""Returns True if the global warn_always flag is turned on. Refer to</span>
+ <span class="sd">:func:`torch.set_warn_always` documentation for more details.</span>
+ <span class="sd">"""</span>
+ <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_warnAlways</span><span class="p">()</span></div>
+
<span class="c1">################################################################################</span>
<span class="c1"># Define Storage and Tensor classes</span>
<span class="c1">################################################################################</span>
@@ -1015,9 +1034,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="k">del</span> <span class="n">_torch_docs</span><span class="p">,</span> <span class="n">_tensor_docs</span><span class="p">,</span> <span class="n">_storage_docs</span>


- <div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a><span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
+ <span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
<span class="sa">r</span><span class="sd">"""Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1"""</span>
- <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span></div>
+ <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span>


<span class="c1"># Import the ops "namespace"</span>