@@ -187,7 +187,7 @@


 <div class="version">
-<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+43d4eaf ▼</a>
+<a href='http://pytorch.org/docs/versions.html'>1.8.0a0+e6279f8 ▼</a>
 </div>


@@ -647,7 +647,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span></div>


-<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">"""Sets the default ``torch.Tensor`` type to floating point tensor type</span>
 <span class="sd">``t``. This type will also be used as default floating point type for</span>
 <span class="sd">type inference in :func:`torch.tensor`.</span>
@@ -668,10 +668,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">"""</span>
 <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
 <span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>


-<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">"""Sets the default floating point dtype to :attr:`d`.</span>
 <span class="sd">This dtype is:</span>

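Similarly, a minimal sketch of torch.set_default_dtype as documented above; the inferred complex dtype assumes the float64 default set on the previous line:

    import torch

    torch.set_default_dtype(torch.float64)
    torch.tensor([1.2, 3]).dtype    # torch.float64, the new default for floating point inference
    torch.tensor([1.2, 3j]).dtype   # torch.complex128, the complex dtype paired with float64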
@@ -699,9 +699,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">torch.complex128</span>

 <span class="sd">"""</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>

-<div class="viewcode-block" id="set_deterministic"><a class="viewcode-back" href="../generated/torch.set_deterministic.html#torch.set_deterministic">[docs]</a><span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">""" Sets whether PyTorch operations must use "deterministic"</span>
 <span class="sd">algorithms. That is, algorithms which, given the same input, and when</span>
 <span class="sd">run on the same software and hardware, always produce the same output.</span>
@@ -778,7 +778,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">d (:class:`bool`): If True, force operations to be deterministic.</span>
 <span class="sd">If False, allow non-deterministic operations.</span>
 <span class="sd">"""</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>

 <div class="viewcode-block" id="is_deterministic"><a class="viewcode-back" href="../generated/torch.is_deterministic.html#torch.is_deterministic">[docs]</a><span class="k">def</span> <span class="nf">is_deterministic</span><span class="p">():</span>
 <span class="sa">r</span><span class="sd">"""Returns True if the global deterministic flag is turned on. Refer to</span>