 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git19454af) ▼</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git7e69745) ▼</a>
 </div>


@@ -663,7 +663,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
             <span class="n">inputs</span><span class="o">=</span><span class="n">inputs</span><span class="p">)</span>
         <span class="n">torch</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">backward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">gradient</span><span class="p">,</span> <span class="n">retain_graph</span><span class="p">,</span> <span class="n">create_graph</span><span class="p">,</span> <span class="n">inputs</span><span class="o">=</span><span class="n">inputs</span><span class="p">)</span></div>

-    <span class="k">def</span> <span class="nf">register_hook</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hook</span><span class="p">):</span>
+<div class="viewcode-block" id="Tensor.register_hook"><a class="viewcode-back" href="../../generated/torch.Tensor.register_hook.html#torch.Tensor.register_hook">[docs]</a>    <span class="k">def</span> <span class="nf">register_hook</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hook</span><span class="p">):</span>
         <span class="sa">r</span><span class="sd">"""Registers a backward hook.</span>

 <span class="sd">        The hook will be called every time a gradient with respect to the</span>
@@ -703,7 +703,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
             <span class="bp">self</span><span class="o">.</span><span class="n">grad_fn</span><span class="o">.</span><span class="n">_register_hook_dict</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
         <span class="n">handle</span> <span class="o">=</span> <span class="n">hooks</span><span class="o">.</span><span class="n">RemovableHandle</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_backward_hooks</span><span class="p">)</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">_backward_hooks</span><span class="p">[</span><span class="n">handle</span><span class="o">.</span><span class="n">id</span><span class="p">]</span> <span class="o">=</span> <span class="n">hook</span>
-        <span class="k">return</span> <span class="n">handle</span>
+        <span class="k">return</span> <span class="n">handle</span></div>

     <span class="k">def</span> <span class="nf">reinforce</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">reward</span><span class="p">):</span>
         <span class="k">def</span> <span class="nf">trim</span><span class="p">(</span><span class="nb">str</span><span class="p">):</span>
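The two hunks above come from `sphinx.ext.viewcode`: now that `Tensor.register_hook` is documented, the generated source page wraps it in a `viewcode-block` div whose `[docs]` anchor links back to the rendered API page. As a minimal sketch of the API being linked (the doubling hook and the input values are illustrative, not part of this diff):

import torch

v = torch.tensor([0., 0., 0.], requires_grad=True)
# The hook runs each time a gradient w.r.t. v is computed; returning a
# tensor replaces the gradient that autograd goes on to use.
handle = v.register_hook(lambda grad: grad * 2)
v.backward(torch.tensor([1., 2., 3.]))
print(v.grad)    # tensor([2., 4., 6.]) -- doubled by the hook
handle.remove()  # the RemovableHandle returned above detaches the hook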
@@ -772,7 +772,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
             <span class="k">return</span> <span class="n">handle_torch_function</span><span class="p">(</span><span class="n">Tensor</span><span class="o">.</span><span class="n">is_shared</span><span class="p">,</span> <span class="p">(</span><span class="bp">self</span><span class="p">,),</span> <span class="bp">self</span><span class="p">)</span>
         <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">storage</span><span class="p">()</span><span class="o">.</span><span class="n">is_shared</span><span class="p">()</span></div>

-    <span class="k">def</span> <span class="nf">share_memory_</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<div class="viewcode-block" id="Tensor.share_memory_"><a class="viewcode-back" href="../../generated/torch.Tensor.share_memory_.html#torch.Tensor.share_memory_">[docs]</a>    <span class="k">def</span> <span class="nf">share_memory_</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
         <span class="sa">r</span><span class="sd">"""Moves the underlying storage to shared memory.</span>

 <span class="sd">        This is a no-op if the underlying storage is already in shared memory</span>
@@ -781,7 +781,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
         <span class="k">if</span> <span class="n">has_torch_function_unary</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
             <span class="k">return</span> <span class="n">handle_torch_function</span><span class="p">(</span><span class="n">Tensor</span><span class="o">.</span><span class="n">share_memory_</span><span class="p">,</span> <span class="p">(</span><span class="bp">self</span><span class="p">,),</span> <span class="bp">self</span><span class="p">)</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">storage</span><span class="p">()</span><span class="o">.</span><span class="n">share_memory_</span><span class="p">()</span>
-        <span class="k">return</span> <span class="bp">self</span>
+        <span class="k">return</span> <span class="bp">self</span></div>

     <span class="k">def</span> <span class="fm">__reversed__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
         <span class="sa">r</span><span class="sd">"""Reverses the tensor along dimension 0."""</span>
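`Tensor.share_memory_` gets the same treatment. Per the docstring shown above, it moves the tensor's underlying storage into shared memory in place (a no-op if the storage is already shared) and returns `self`. A short sketch, with illustrative values:

import torch

t = torch.zeros(3)
t.share_memory_()     # moves the backing storage to shared memory, returns self
print(t.is_shared())  # True: the tensor can now be passed to
                      # torch.multiprocessing workers without copying its data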
@@ -880,7 +880,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
         <span class="kn">from</span> <span class="nn">torch.autograd._functions</span> <span class="kn">import</span> <span class="n">Resize</span>
         <span class="k">return</span> <span class="n">Resize</span><span class="o">.</span><span class="n">apply</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">tensor</span><span class="o">.</span><span class="n">size</span><span class="p">())</span>

-    <span class="k">def</span> <span class="nf">split</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">split_size</span><span class="p">,</span> <span class="n">dim</span><span class="o">=</span><span class="mi">0</span><span class="p">):</span>
+<div class="viewcode-block" id="Tensor.split"><a class="viewcode-back" href="../../generated/torch.Tensor.split.html#torch.Tensor.split">[docs]</a>    <span class="k">def</span> <span class="nf">split</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">split_size</span><span class="p">,</span> <span class="n">dim</span><span class="o">=</span><span class="mi">0</span><span class="p">):</span>
         <span class="sa">r</span><span class="sd">"""See :func:`torch.split`</span>
 <span class="sd">        """</span>
         <span class="k">if</span> <span class="n">has_torch_function_unary</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
@@ -894,7 +894,7 @@ <h1>Source code for torch._tensor</h1><div class="highlight"><pre>
         <span class="k">except</span> <span class="ne">ValueError</span><span class="p">:</span>
             <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">Tensor</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">split_with_sizes</span><span class="p">(</span><span class="n">split_size</span><span class="p">,</span> <span class="n">dim</span><span class="p">)</span>
         <span class="k">else</span><span class="p">:</span>
-            <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">Tensor</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">split_with_sizes</span><span class="p">(</span><span class="n">split_size</span><span class="p">,</span> <span class="n">dim</span><span class="p">)</span>
+            <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">Tensor</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">split_with_sizes</span><span class="p">(</span><span class="n">split_size</span><span class="p">,</span> <span class="n">dim</span><span class="p">)</span></div>

     <span class="k">def</span> <span class="nf">unique</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="nb">sorted</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">return_inverse</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">return_counts</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">dim</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
         <span class="sa">r</span><span class="sd">"""Returns the unique elements of the input tensor.</span>
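`Tensor.split` is the third method wrapped here. The last hunk also shows why the Python wrapper exists: `split_size` may be a single chunk size or a list of sizes, and the non-int cases fall through to `split_with_sizes`. A small sketch with illustrative inputs:

import torch

x = torch.arange(10)
print(x.split(4))       # equal chunks of 4, last one smaller:
                        # (tensor([0, 1, 2, 3]), tensor([4, 5, 6, 7]), tensor([8, 9]))
print(x.split([3, 7]))  # explicit sizes, routed to split_with_sizes:
                        # (tensor([0, 1, 2]), tensor([3, 4, 5, 6, 7, 8, 9]))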