@@ -188,7 +188,7 @@


       <div class="version">
-        master (1.9.0a0+git4ec6b36)
+        master (1.9.0a0+git49244d5)
       </div>

@@ -455,7 +455,7 @@ <h1>Source code for torch.autograd</h1><div class="highlight"><pre>
     retain_graph: Optional[bool] = None,
     create_graph: bool = False,
     grad_variables: Optional[_TensorOrTensors] = None,
-    inputs: Optional[Sequence[torch.Tensor]] = None,
+    inputs: Optional[_TensorOrTensors] = None,
 ) -> None:
     r"""Computes the sum of gradients of given tensors w.r.t. graph leaves.

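With this change, `inputs` accepts the same `_TensorOrTensors` union as `tensors`, so a bare Tensor no longer has to be wrapped in a sequence. A minimal sketch of both call styles (the tensors here are illustrative, not from the commit):

import torch

x = torch.randn(3, requires_grad=True)
y = torch.randn(3, requires_grad=True)
loss = (x * y).sum()

# Passing `inputs` as a sequence works on both sides of this change.
torch.autograd.backward(loss, inputs=[x], retain_graph=True)

# After this change, a bare Tensor is also accepted and wrapped internally.
torch.autograd.backward(loss, inputs=y)

print(x.grad, y.grad)  # each call accumulated .grad only for its requested inputs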
@@ -487,24 +487,24 @@ <h1>Source code for torch.autograd</h1><div class="highlight"><pre>
     :ref:`Stream semantics of backward passes<bwd-cuda-stream-semantics>`.

     Args:
-        tensors (sequence of Tensor): Tensors of which the derivative will be
+        tensors (Sequence[Tensor] or Tensor): Tensors of which the derivative will be
             computed.
-        grad_tensors (sequence of (Tensor or None)): The "vector" in the Jacobian-vector
-            product, usually gradients w.r.t. each element of corresponding tensors.
-            None values can be specified for scalar Tensors or ones that don't require
-            grad. If a None value would be acceptable for all grad_tensors, then this
-            argument is optional.
+        grad_tensors (Sequence[Tensor or None] or Tensor, optional): The "vector" in
+            the Jacobian-vector product, usually gradients w.r.t. each element of
+            corresponding tensors. None values can be specified for scalar Tensors or
+            ones that don't require grad. If a None value would be acceptable for all
+            grad_tensors, then this argument is optional.
         retain_graph (bool, optional): If ``False``, the graph used to compute the grad
             will be freed. Note that in nearly all cases setting this option to ``True``
             is not needed and often can be worked around in a much more efficient
             way. Defaults to the value of ``create_graph``.
         create_graph (bool, optional): If ``True``, graph of the derivative will
             be constructed, allowing to compute higher order derivative products.
             Defaults to ``False``.
-        inputs (sequence of Tensor): Inputs w.r.t. which the gradient will be
-            accumulated into ``.grad``. All other Tensors will be ignored. If not
-            provided, the gradient is accumulated into all the leaf Tensors that were
-            used to compute the attr::tensors. All the provided inputs must be leaf
+        inputs (Sequence[Tensor] or Tensor, optional): Inputs w.r.t. which the gradient
+            will be accumulated into ``.grad``. All other Tensors will be ignored. If
+            not provided, the gradient is accumulated into all the leaf Tensors that
+            were used to compute the :attr:`tensors`. All the provided inputs must be leaf
             Tensors.
     """
     if grad_variables is not None:
@@ -519,7 +519,8 @@ <h1>Source code for torch.autograd</h1><div class="highlight"><pre>
         raise RuntimeError("'inputs' argument to backward() cannot be empty.")

     tensors = (tensors,) if isinstance(tensors, torch.Tensor) else tuple(tensors)
-    inputs = tuple(inputs) if inputs is not None else tuple()
+    inputs = (inputs,) if isinstance(inputs, torch.Tensor) else \
+        tuple(inputs) if inputs is not None else tuple()

     grad_tensors_ = _tensor_or_tensors_to_tuple(grad_tensors, len(tensors))
     grad_tensors_ = _make_grads(tensors, grad_tensors_)