@@ -158,7 +158,7 @@
 
 
   <div class="version">
-    <a href='http://pytorch.org/docs/versions.html'>1.7.0a0+69d74c8 ▼</a>
+    <a href='http://pytorch.org/docs/versions.html'>1.7.0a0+970389e ▼</a>
   </div>
 
 
@@ -588,7 +588,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span></div>
 
 
-<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets the default ``torch.Tensor`` type to floating point tensor type</span>
 <span class="sd">    ``t``. This type will also be used as default floating point type for</span>
 <span class="sd">    type inference in :func:`torch.tensor`.</span>
@@ -609,10 +609,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">    """</span>
     <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
         <span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
 
 
-<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets the default floating point dtype to :attr:`d`.</span>
 <span class="sd">    This dtype is:</span>
 
@@ -640,9 +640,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">        torch.complex128</span>
 
 <span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
 
-<div class="viewcode-block" id="set_deterministic"><a class="viewcode-back" href="../generated/torch.set_deterministic.html#torch.set_deterministic">[docs]</a><span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">""" Sets whether native PyTorch operations must use deterministic</span>
 <span class="sd">    algorithms. When True, operations without deterministic algorithms</span>
 <span class="sd">    will throw a :class:RuntimeError when called.</span>
@@ -712,7 +712,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">        d (:class:`bool`): If True, force operations to be deterministic.</span>
 <span class="sd">            If False, allow non-deterministic operations.</span>
 <span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
 
 <div class="viewcode-block" id="is_deterministic"><a class="viewcode-back" href="../generated/torch.is_deterministic.html#torch.is_deterministic">[docs]</a><span class="k">def</span> <span class="nf">is_deterministic</span><span class="p">():</span>
     <span class="sa">r</span><span class="sd">"""Returns True if the global deterministic flag is turned on. Refer to</span>
@@ -896,9 +896,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">del</span> <span class="n">_torch_docs</span><span class="p">,</span> <span class="n">_tensor_docs</span><span class="p">,</span> <span class="n">_storage_docs</span>
 
 
-<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a><span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
+<span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
     <span class="sa">r</span><span class="sd">"""Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1"""</span>
-    <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span></div>
+    <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span>
 
 
 <span class="c1"># Import the ops "namespace"</span>
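For reference, the functions whose viewcode anchors change in this diff are ordinary top-level torch APIs. A minimal usage sketch, not part of the commit itself, assuming a PyTorch build from this 1.7.0a0 line (where the determinism switch is still named torch.set_deterministic, before its later rename to torch.use_deterministic_algorithms):

import torch

# The default floating-point dtype drives type inference in torch.tensor().
torch.set_default_dtype(torch.float64)
assert torch.tensor([1.2, 3.0]).dtype is torch.float64

# set_default_tensor_type also accepts the dotted type name as a string;
# setting it resets the default floating-point dtype as a side effect.
torch.set_default_tensor_type('torch.FloatTensor')
assert torch.tensor([1.2, 3.0]).dtype is torch.float32

# Require deterministic algorithms; operations without a deterministic
# implementation raise RuntimeError while the flag is on.
torch.set_deterministic(True)
assert torch.is_deterministic()
torch.set_deterministic(False)

# True if the binary was built with _GLIBCXX_USE_CXX11_ABI=1.
print(torch.compiled_with_cxx11_abi())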