<div class="pytorch-left-menu-search">

  <div class="version">
-    <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+git4b0e1e1 ) ▼</a>
+    <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitafbb6e8 ) ▼</a>
  </div>

@@ -690,7 +690,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
    <span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span></div>


-<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">"""Sets the default ``torch.Tensor`` type to floating point tensor type</span>
<span class="sd">    ``t``. This type will also be used as default floating point type for</span>
<span class="sd">    type inference in :func:`torch.tensor`.</span>
@@ -711,10 +711,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="sd">    """</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
        <span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>


-<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">"""</span>

<span class="sd">    Sets the default floating point dtype to :attr:`d`. Supports torch.float32</span>
@@ -757,9 +757,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="sd">    torch.complex128</span>

<span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>

-<div class="viewcode-block" id="use_deterministic_algorithms"><a class="viewcode-back" href="../generated/torch.use_deterministic_algorithms.html#torch.use_deterministic_algorithms">[docs]</a><span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">,</span> <span class="o">*</span><span class="p">,</span> <span class="n">warn_only</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">,</span> <span class="o">*</span><span class="p">,</span> <span class="n">warn_only</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">""" Sets whether PyTorch operations must use "deterministic"</span>
<span class="sd">    algorithms. That is, algorithms which, given the same input, and when</span>
<span class="sd">    run on the same software and hardware, always produce the same output.</span>
@@ -882,7 +882,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="sd">    ...</span>
<span class="sd">    RuntimeError: index_add_cuda_ does not have a deterministic implementation...</span>
<span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">,</span> <span class="n">warn_only</span><span class="o">=</span><span class="n">warn_only</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">,</span> <span class="n">warn_only</span><span class="o">=</span><span class="n">warn_only</span><span class="p">)</span>

<div class="viewcode-block" id="are_deterministic_algorithms_enabled"><a class="viewcode-back" href="../generated/torch.are_deterministic_algorithms_enabled.html#torch.are_deterministic_algorithms_enabled">[docs]</a><span class="k">def</span> <span class="nf">are_deterministic_algorithms_enabled</span><span class="p">():</span>
    <span class="sa">r</span><span class="sd">"""Returns True if the global deterministic flag is turned on. Refer to</span>
@@ -897,7 +897,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="sd">    """</span>
    <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_deterministic_algorithms_warn_only</span><span class="p">()</span></div>

-<div class="viewcode-block" id="set_deterministic_debug_mode"><a class="viewcode-back" href="../generated/torch.set_deterministic_debug_mode.html#torch.set_deterministic_debug_mode">[docs]</a><span class="k">def</span> <span class="nf">set_deterministic_debug_mode</span><span class="p">(</span><span class="n">debug_mode</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="n">builtins</span><span class="o">.</span><span class="n">int</span><span class="p">,</span> <span class="nb">str</span><span class="p">])</span> <span class="o">-></span> <span class="kc">None</span><span class="p">:</span>
+<span class="k">def</span> <span class="nf">set_deterministic_debug_mode</span><span class="p">(</span><span class="n">debug_mode</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="n">builtins</span><span class="o">.</span><span class="n">int</span><span class="p">,</span> <span class="nb">str</span><span class="p">])</span> <span class="o">-></span> <span class="kc">None</span><span class="p">:</span>
    <span class="sa">r</span><span class="sd">"""Sets the debug mode for deterministic operations.</span>

<span class="sd">    .. note:: This is an alternative interface for</span>
@@ -937,7 +937,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
    <span class="k">else</span><span class="p">:</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span>
            <span class="s1">'invalid value of debug_mode, expected 0, 1, or 2, '</span>
-            <span class="sa">f</span><span class="s1">'but got </span><span class="si">{</span><span class="n">debug_mode</span><span class="si">}</span><span class="s1">'</span><span class="p">)</span></div>
+            <span class="sa">f</span><span class="s1">'but got </span><span class="si">{</span><span class="n">debug_mode</span><span class="si">}</span><span class="s1">'</span><span class="p">)</span>

<span class="k">def</span> <span class="nf">get_deterministic_debug_mode</span><span class="p">()</span> <span class="o">-></span> <span class="n">builtins</span><span class="o">.</span><span class="n">int</span><span class="p">:</span>
    <span class="sa">r</span><span class="sd">"""Returns the current value of the debug mode for deterministic</span>
@@ -953,7 +953,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
    <span class="k">else</span><span class="p">:</span>
        <span class="k">return</span> <span class="mi">0</span>

-<div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a><span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">"""When this flag is False (default) then some PyTorch warnings may only</span>
<span class="sd">    appear once per process. This helps avoid excessive warning information.</span>
<span class="sd">    Setting it to True causes these warnings to always appear, which may be</span>
@@ -963,7 +963,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
<span class="sd">        b (:class:`bool`): If True, force warnings to always be emitted</span>
<span class="sd">               If False, set to the default behaviour</span>
<span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span>

<div class="viewcode-block" id="is_warn_always_enabled"><a class="viewcode-back" href="../generated/torch.is_warn_always_enabled.html#torch.is_warn_always_enabled">[docs]</a><span class="k">def</span> <span class="nf">is_warn_always_enabled</span><span class="p">():</span>
    <span class="sa">r</span><span class="sd">"""Returns True if the global warn_always flag is turned on. Refer to</span>