@@ -705,7 +705,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
         # All strings are unicode in Python 3.
         return torch._tensor_str._str(self)
 
-<div class="viewcode-block" id="Tensor.backward"><a class="viewcode-back" href="../../generated/torch.Tensor.backward.html#torch.Tensor.backward">[docs]</a>    def backward(self, gradient=None, retain_graph=None, create_graph=False, inputs=None):
+    def backward(self, gradient=None, retain_graph=None, create_graph=False, inputs=None):
         r"""Computes the gradient of current tensor w.r.t. graph leaves.
 
         The graph is differentiated using the chain rule. If the tensor is
@@ -761,7 +761,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
                 retain_graph=retain_graph,
                 create_graph=create_graph,
                 inputs=inputs)
-        torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)</div>
+        torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)
 
     def register_hook(self, hook):
         r"""Registers a backward hook.
@@ -892,13 +892,13 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
         else:
             return self.flip(0)
 
-    def norm(self, p="fro", dim=None, keepdim=False, dtype=None):
+<div class="viewcode-block" id="Tensor.norm"><a class="viewcode-back" href="../../generated/torch.Tensor.norm.html#torch.Tensor.norm">[docs]</a>    def norm(self, p="fro", dim=None, keepdim=False, dtype=None):
         r"""See :func:`torch.norm`"""
         if has_torch_function_unary(self):
             return handle_torch_function(Tensor.norm, (self,), self, p=p, dim=dim, keepdim=keepdim, dtype=dtype)
-        return torch.norm(self, p, dim, keepdim, dtype=dtype)
+        return torch.norm(self, p, dim, keepdim, dtype=dtype)</div>
 
-    def lu(self, pivot=True, get_infos=False):
+<div class="viewcode-block" id="Tensor.lu"><a class="viewcode-back" href="../../generated/torch.Tensor.lu.html#torch.Tensor.lu">[docs]</a>    def lu(self, pivot=True, get_infos=False):
         r"""See :func:`torch.lu`"""
         # If get_infos is True, then we don't need to check for errors and vice versa
         if has_torch_function_unary(self):
@@ -908,7 +908,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
         if get_infos:
             return LU, pivots, infos
         else:
-            return LU, pivots
+            return LU, pivots</div>
 
     def stft(self, n_fft: int, hop_length: Optional[int] = None,
              win_length: Optional[int] = None, window: 'Optional[Tensor]' = None,