
from typing import cast, List, Optional, Tuple, Union

import torch
from torch import Tensor

from .optimizer import (
    _capturable_doc,
    _default_to_fused_or_foreach,
    _device_dtype_check_for_fused,
    _differentiable_doc,
    _disable_dynamo_if_unsupported,
    _foreach_doc,
    _fused_doc,
    _get_capturable_supported_devices,
    _get_scalar_dtype,
    _get_value,
    _maximize_doc,
    _stack_if_compiling,
    _use_grad_for_differentiable,
    _view_as_real,
    DeviceDict,
    Optimizer,
    ParamsT,
)

__all__ = ["AdamW", "adamw"]


class AdamW(Optimizer):
    def __init__(
        self,
        params: ParamsT,
        lr: Union[float, Tensor] = 1e-3,
        betas: Tuple[float, float] = (0.9, 0.999),
        eps: float = 1e-8,
        weight_decay: float = 1e-2,
        amsgrad: bool = False,
        *,
        maximize: bool = False,
        foreach: Optional[bool] = None,
        capturable: bool = False,
        differentiable: bool = False,
        fused: Optional[bool] = None,
    ):
        if isinstance(lr, Tensor):
            if foreach and not capturable:
                raise ValueError(
                    "lr as a Tensor is not supported for capturable=False and foreach=True"
                )
            if lr.numel() != 1:
                raise ValueError("Tensor lr must be 1-element")
        if not 0.0 <= lr:
            raise ValueError(f"Invalid learning rate: {lr}")
        if not 0.0 <= eps:
            raise ValueError(f"Invalid epsilon value: {eps}")
        if not 0.0 <= betas[0] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 0: {betas[0]}")
        if not 0.0 <= betas[1] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 1: {betas[1]}")
        if not 0.0 <= weight_decay:
            raise ValueError(f"Invalid weight_decay value: {weight_decay}")
        defaults = dict(
            lr=lr,
            betas=betas,
            eps=eps,
            weight_decay=weight_decay,
            amsgrad=amsgrad,
            foreach=foreach,
            maximize=maximize,
            capturable=capturable,
            differentiable=differentiable,
            fused=fused,
        )
        super().__init__(params, defaults)

        if fused:
            if differentiable:
                raise RuntimeError("`fused` does not support `differentiable`")
            self._step_supports_amp_scaling = True
            if foreach:
                raise RuntimeError("`fused` and `foreach` cannot be `True` together.")

    def __setstate__(self, state):
        super().__setstate__(state)
        for group in self.param_groups:
            group.setdefault("amsgrad", False)
            group.setdefault("maximize", False)
            group.setdefault("foreach", None)
            group.setdefault("capturable", False)
            group.setdefault("differentiable", False)
            fused = group.setdefault("fused", None)
            for p in group["params"]:
                p_state = self.state.get(p, [])
                if len(p_state) != 0 and not torch.is_tensor(p_state["step"]):
                    step_val = float(p_state["step"])
                    p_state["step"] = (
                        torch.tensor(
                            step_val,
                            dtype=_get_scalar_dtype(is_fused=fused),
                            device=p.device,
                        )
                        if group["capturable"] or group["fused"]
                        else torch.tensor(step_val, dtype=_get_scalar_dtype())
                    )

    def _init_group(
        self,
        group,
        params_with_grad,
        grads,
        amsgrad,
        exp_avgs,
        exp_avg_sqs,
        max_exp_avg_sqs,
        state_steps,
    ):
        has_complex = False
        for p in group["params"]:
            if p.grad is None:
                continue
            has_complex |= torch.is_complex(p)
            params_with_grad.append(p)
            if p.grad.is_sparse:
                raise RuntimeError("AdamW does not support sparse gradients")
            grads.append(p.grad)

            state = self.state[p]

            # State initialization
            if len(state) == 0:
                if group["fused"]:
                    _device_dtype_check_for_fused(p)
                # Deliberately host `step` on CPU if both capturable and fused are off,
                # since kernel launches are costly on CUDA and XLA.
                state["step"] = (
                    torch.zeros(
                        (),
                        dtype=_get_scalar_dtype(is_fused=group["fused"]),
                        device=p.device,
                    )
                    if group["capturable"] or group["fused"]
                    else torch.tensor(0.0, dtype=_get_scalar_dtype())
                )
                # Exponential moving average of gradient values
                state["exp_avg"] = torch.zeros_like(
                    p, memory_format=torch.preserve_format
                )
                # Exponential moving average of squared gradient values
                state["exp_avg_sq"] = torch.zeros_like(
                    p, memory_format=torch.preserve_format
                )
                if amsgrad:
                    # Maintains max of all exp. moving avg. of sq. grad. values
                    state["max_exp_avg_sq"] = torch.zeros_like(
                        p, memory_format=torch.preserve_format
                    )

            exp_avgs.append(state["exp_avg"])
            exp_avg_sqs.append(state["exp_avg_sq"])

            if group["amsgrad"]:
                max_exp_avg_sqs.append(state["max_exp_avg_sq"])
            if group["differentiable"] and state["step"].requires_grad:
                raise RuntimeError(
                    "`requires_grad` is not supported for `step` in differentiable mode"
                )
            # Foreach without capturable does not support a tensor lr
            if (
                group["foreach"]
                and isinstance(group["lr"], Tensor)
                and not group["capturable"]
            ):
                raise RuntimeError(
                    "lr as a Tensor is not supported for capturable=False and foreach=True"
                )

            state_steps.append(state["step"])
        return has_complex

    @_use_grad_for_differentiable
    def step(self, closure=None):
        """Perform a single optimization step.

        Args:
            closure (Callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        self._cuda_graph_capture_health_check()

        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()

        for group in self.param_groups:
            params_with_grad: List[Tensor] = []
            grads: List[Tensor] = []
            exp_avgs: List[Tensor] = []
            exp_avg_sqs: List[Tensor] = []
            max_exp_avg_sqs: List[Tensor] = []
            state_steps: List[Tensor] = []
            amsgrad: bool = group["amsgrad"]
            beta1, beta2 = cast(Tuple[float, float], group["betas"])

            has_complex = self._init_group(
                group,
                params_with_grad,
                grads,
                amsgrad,
                exp_avgs,
                exp_avg_sqs,
                max_exp_avg_sqs,
                state_steps,
            )

            adamw(
                params_with_grad,
                grads,
                exp_avgs,
                exp_avg_sqs,
                max_exp_avg_sqs,
                state_steps,
                amsgrad=amsgrad,
                beta1=beta1,
                beta2=beta2,
                lr=group["lr"],
                weight_decay=group["weight_decay"],
                eps=group["eps"],
                maximize=group["maximize"],
                foreach=group["foreach"],
                capturable=group["capturable"],
                differentiable=group["differentiable"],
                fused=group["fused"],
                grad_scale=getattr(self, "grad_scale", None),
                found_inf=getattr(self, "found_inf", None),
                has_complex=has_complex,
            )

        return loss


AdamW.__doc__ = (
    r"""Implements AdamW algorithm.

    .. math::
       \begin{aligned}
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{input}      : \gamma \text{(lr)}, \: \beta_1, \beta_2
                \text{(betas)}, \: \theta_0 \text{(params)}, \: f(\theta) \text{(objective)},
                \: \epsilon \text{ (epsilon)}                                                    \\
            &\hspace{13mm}      \lambda \text{(weight decay)},  \: \textit{amsgrad},
                \: \textit{maximize}                                                             \\
            &\textbf{initialize} : m_0 \leftarrow 0 \text{ (first moment)}, v_0 \leftarrow 0
                \text{ (second moment)}, \: \widehat{v_0}^{max}\leftarrow 0              \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{for} \: t=1 \: \textbf{to} \: \ldots \: \textbf{do}                         \\

            &\hspace{5mm}\textbf{if} \: \textit{maximize}:                                       \\
            &\hspace{10mm}g_t           \leftarrow   -\nabla_{\theta} f_t (\theta_{t-1})          \\
            &\hspace{5mm}\textbf{else}                                                           \\
            &\hspace{10mm}g_t           \leftarrow   \nabla_{\theta} f_t (\theta_{t-1})           \\
            &\hspace{5mm} \theta_t \leftarrow \theta_{t-1} - \gamma \lambda \theta_{t-1}         \\
            &\hspace{5mm}m_t           \leftarrow   \beta_1 m_{t-1} + (1 - \beta_1) g_t          \\
            &\hspace{5mm}v_t           \leftarrow   \beta_2 v_{t-1} + (1-\beta_2) g^2_t          \\
            &\hspace{5mm}\widehat{m_t} \leftarrow   m_t/\big(1-\beta_1^t \big)                   \\
            &\hspace{5mm}\widehat{v_t} \leftarrow   v_t/\big(1-\beta_2^t \big)                   \\
            &\hspace{5mm}\textbf{if} \: amsgrad                                                  \\
            &\hspace{10mm}\widehat{v_t}^{max} \leftarrow \mathrm{max}(\widehat{v_t}^{max},
                \widehat{v_t})                                                                   \\
            &\hspace{10mm}\theta_t \leftarrow \theta_t - \gamma \widehat{m_t}/
                \big(\sqrt{\widehat{v_t}^{max}} + \epsilon \big)                                 \\
            &\hspace{5mm}\textbf{else}                                                           \\
            &\hspace{10mm}\theta_t \leftarrow \theta_t - \gamma \widehat{m_t}/
                \big(\sqrt{\widehat{v_t}} + \epsilon \big)                                       \\
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
            &\bf{return} \:  \theta_t                                                     \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
       \end{aligned}

    For further details regarding the algorithm we refer to `Decoupled Weight Decay Regularization`_.
    """
    + rf"""
    Args:
        params (iterable): iterable of parameters to optimize or dicts defining
            parameter groups
        lr (float, Tensor, optional): learning rate (default: 1e-3). A tensor LR
            is not yet supported for all our implementations. Please use a float
            LR if you are not also specifying fused=True or capturable=True.
        betas (Tuple[float, float], optional): coefficients used for computing
            running averages of gradient and its square (default: (0.9, 0.999))
        eps (float, optional): term added to the denominator to improve
            numerical stability (default: 1e-8)
        weight_decay (float, optional): weight decay coefficient (default: 1e-2)
        amsgrad (bool, optional): whether to use the AMSGrad variant of this
            algorithm from the paper `On the Convergence of Adam and Beyond`_
            (default: False)
        {_foreach_doc}
        {_maximize_doc}
        {_capturable_doc}
        {_differentiable_doc}
        {_fused_doc}
    .. Note::
        A prototype implementation of Adam and AdamW for MPS supports `torch.float32` and `torch.float16`.
    .. _Decoupled Weight Decay Regularization:
        https://arxiv.org/abs/1711.05101
    .. _On the Convergence of Adam and Beyond:
        https://openreview.net/forum?id=ryQu7f-RZ

    """
)
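

# --- Illustrative usage sketch (editorial addition, not part of the upstream
# torch.optim.adamw module). It only demonstrates how the AdamW class documented
# above is typically constructed and stepped; the toy model, data, and
# hyperparameters are arbitrary placeholders.
def _example_usage() -> None:  # pragma: no cover
    model = torch.nn.Linear(4, 1)
    optimizer = AdamW(model.parameters(), lr=1e-3, weight_decay=1e-2)
    inputs, targets = torch.randn(8, 4), torch.randn(8, 1)
    for _ in range(3):
        optimizer.zero_grad()
        loss = torch.nn.functional.mse_loss(model(inputs), targets)
        loss.backward()
        optimizer.step()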


def _single_tensor_adamw(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_avg_sqs: List[Tensor],
    max_exp_avg_sqs: List[Tensor],
    state_steps: List[Tensor],
    grad_scale: Optional[Tensor],
    found_inf: Optional[Tensor],
    *,
    amsgrad: bool,
    beta1: float,
    beta2: float,
    lr: Union[Tensor, float],
    weight_decay: float,
    eps: float,
    maximize: bool,
    capturable: bool,
    differentiable: bool,
    has_complex: bool,
):
    assert grad_scale is None and found_inf is None

    if torch.jit.is_scripting():
        # The ops below have overloads for both float and Tensor lrs; TorchScript
        # cannot resolve them, so assert the common float case here.
        assert isinstance(lr, float)

    for i, param in enumerate(params):
        grad = grads[i] if not maximize else -grads[i]
        exp_avg = exp_avgs[i]
        exp_avg_sq = exp_avg_sqs[i]
        step_t = state_steps[i]

        # If compiling, the compiler will handle cudagraph checks.
        if not torch._utils.is_compiling() and capturable:
            capturable_supported_devices = _get_capturable_supported_devices()
            assert (
                param.device.type == step_t.device.type
                and param.device.type in capturable_supported_devices
            ), f"If capturable=True, params and state_steps must be on supported devices: {capturable_supported_devices}."

        if torch.is_complex(param):
            grad = torch.view_as_real(grad)
            exp_avg = torch.view_as_real(exp_avg)
            exp_avg_sq = torch.view_as_real(exp_avg_sq)
            if amsgrad:
                max_exp_avg_sqs[i] = torch.view_as_real(max_exp_avg_sqs[i])
            param = torch.view_as_real(param)

        # update step
        step_t += 1

        # Perform stepweight decay
        param.mul_(1 - lr * weight_decay)

        # Decay the first and second moment running average coefficient
        exp_avg.lerp_(grad, 1 - beta1)
        exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)

        if capturable or differentiable:
            step = step_t

            bias_correction1 = 1 - beta1**step
            bias_correction2 = 1 - beta2**step

            step_size = lr / bias_correction1
            step_size_neg = step_size.neg()

            bias_correction2_sqrt = bias_correction2.sqrt()

            if amsgrad:
                # Maintains the maximum of all 2nd moment running avg. till now
                if differentiable:
                    max_exp_avg_sq = max_exp_avg_sqs[i].clone()
                else:
                    max_exp_avg_sq = max_exp_avg_sqs[i]

                max_exp_avg_sqs[i].copy_(torch.maximum(max_exp_avg_sq, exp_avg_sq))

                # Uses the max. for normalizing the running avg. of the gradient.
                # The 1-elem step_size math is folded into the denominator to avoid
                # an extra param-set-sized read+write (addcdiv_ requires a Number value).
                denom = (
                    max_exp_avg_sqs[i].sqrt() / (bias_correction2_sqrt * step_size_neg)
                ).add_(eps / step_size_neg)
            else:
                denom = (
                    exp_avg_sq.sqrt() / (bias_correction2_sqrt * step_size_neg)
                ).add_(eps / step_size_neg)

            param.addcdiv_(exp_avg, denom)
        else:
            step = _get_value(step_t)

            bias_correction1 = 1 - beta1**step
            bias_correction2 = 1 - beta2**step

            step_size = lr / bias_correction1

            bias_correction2_sqrt = bias_correction2**0.5

            if amsgrad:
                # Maintains the maximum of all 2nd moment running avg. till now
                torch.maximum(max_exp_avg_sqs[i], exp_avg_sq, out=max_exp_avg_sqs[i])

                # Use the max. for normalizing the running avg. of the gradient
                denom = (max_exp_avg_sqs[i].sqrt() / bias_correction2_sqrt).add_(eps)
            else:
                denom = (exp_avg_sq.sqrt() / bias_correction2_sqrt).add_(eps)

            param.addcdiv_(exp_avg, denom, value=-step_size)

        # Lastly, switch back to complex view
        if amsgrad and torch.is_complex(params[i]):
            max_exp_avg_sqs[i] = torch.view_as_complex(max_exp_avg_sqs[i])


def _multi_tensor_adamw(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_avg_sqs: List[Tensor],
    max_exp_avg_sqs: List[Tensor],
    state_steps: List[Tensor],
    grad_scale: Optional[Tensor],
    found_inf: Optional[Tensor],
    *,
    amsgrad: bool,
    beta1: float,
    beta2: float,
    lr: Union[Tensor, float],
    weight_decay: float,
    eps: float,
    maximize: bool,
    capturable: bool,
    differentiable: bool,
    has_complex: bool,
):
    if len(params) == 0:
        return

    if isinstance(lr, Tensor) and not capturable:
        raise RuntimeError(
            "lr as a Tensor is not supported for capturable=False and foreach=True"
        )

    # If compiling, the compiler will handle cudagraph checks.
    if not torch._utils.is_compiling() and capturable:
        capturable_supported_devices = _get_capturable_supported_devices(
            supports_xla=False
        )
        assert all(
            p.device.type == step.device.type
            and p.device.type in capturable_supported_devices
            for p, step in zip(params, state_steps)
        ), f"If capturable=True, params and state_steps must be on supported devices: {capturable_supported_devices}."

    assert not differentiable, "_foreach ops don't support autograd"

    assert grad_scale is None and found_inf is None

    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps]
    )
    for (
        device_params_,
        device_grads_,
        device_exp_avgs_,
        device_exp_avg_sqs_,
        device_max_exp_avg_sqs_,
        device_state_steps_,
    ), _ in grouped_tensors.values():
        device_params = cast(List[Tensor], device_params_)
        device_grads = cast(List[Tensor], device_grads_)
        device_exp_avgs = cast(List[Tensor], device_exp_avgs_)
        device_exp_avg_sqs = cast(List[Tensor], device_exp_avg_sqs_)
        device_state_steps = cast(List[Tensor], device_state_steps_)

        if has_complex:
            if amsgrad:
                device_max_exp_avg_sqs = cast(List[Tensor], device_max_exp_avg_sqs_)
                _view_as_real(
                    device_params,
                    device_grads,
                    device_exp_avgs,
                    device_exp_avg_sqs,
                    device_max_exp_avg_sqs,
                )
            else:
                _view_as_real(
                    device_params, device_grads, device_exp_avgs, device_exp_avg_sqs
                )

        if maximize:
            device_grads = torch._foreach_neg(device_grads)

        # Update steps. If steps live on CPU, foreach falls back to a slow
        # per-tensor loop; wrapping 1 into a Tensor once avoids re-wrapping it on
        # every iteration, and alpha selects the right overload.
        if not torch._utils.is_compiling() and device_state_steps[0].is_cpu:
            torch._foreach_add_(
                device_state_steps, torch.tensor(1.0, device="cpu"), alpha=1.0
            )
        else:
            torch._foreach_add_(device_state_steps, 1)

        if weight_decay != 0:
            # Perform stepweight decay
            torch._foreach_mul_(device_params, 1 - lr * weight_decay)

        # Decay the first and second moment running average coefficient
        torch._foreach_lerp_(device_exp_avgs, device_grads, 1 - beta1)

        torch._foreach_mul_(device_exp_avg_sqs, beta2)
        torch._foreach_addcmul_(
            device_exp_avg_sqs, device_grads, device_grads, 1 - beta2
        )

        # Free the local intermediate; it is not used past this point.
        del device_grads

        if capturable:
            bias_correction1 = torch._foreach_pow(beta1, device_state_steps)
            bias_correction2 = torch._foreach_pow(beta2, device_state_steps)
            # foreach_sub doesn't allow a scalar as the first arg
            torch._foreach_sub_(bias_correction1, 1)
            torch._foreach_sub_(bias_correction2, 1)
            # bias_correction1 is not negated here because it is negated below anyway
            torch._foreach_neg_(bias_correction2)

            # foreach_div doesn't allow a scalar as the first arg
            torch._foreach_div_(bias_correction1, lr)
            torch._foreach_reciprocal_(bias_correction1)

            torch._foreach_sqrt_(bias_correction2)

            # Re-assign for clarity; only minimal intermediates are kept:
            #   step_size = -lr / (1 - beta1 ^ t)
            #   bias_correction2_sqrt = sqrt(1 - beta2 ^ t)
            step_size = bias_correction1
            bias_correction2_sqrt = bias_correction2

            if amsgrad:
                device_max_exp_avg_sqs = cast(List[Tensor], device_max_exp_avg_sqs_)
                # Maintains the maximum of all 2nd moment running avg. till now
                torch._foreach_maximum_(device_max_exp_avg_sqs, device_exp_avg_sqs)

                # Use the max. for normalizing the running avg. of the gradient
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_max_exp_avg_sqs)
            else:
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_exp_avg_sqs)

            torch._foreach_div_(exp_avg_sq_sqrt, bias_correction2_sqrt)
            torch._foreach_add_(exp_avg_sq_sqrt, eps)
            torch._foreach_div_(exp_avg_sq_sqrt, step_size)

            # At this point, exp_avg_sq_sqrt = -(1 - beta1^t) * [sqrt(exp_avg_sq / (1 - beta2^t)) + eps] / lr
            torch._foreach_addcdiv_(device_params, device_exp_avgs, exp_avg_sq_sqrt)
        else:
            bias_correction1 = [
                1 - beta1 ** _get_value(step) for step in device_state_steps
            ]
            bias_correction2 = [
                1 - beta2 ** _get_value(step) for step in device_state_steps
            ]

            step_size = _stack_if_compiling([(lr / bc) * -1 for bc in bias_correction1])

            bias_correction2_sqrt = [bc**0.5 for bc in bias_correction2]

            if amsgrad:
                device_max_exp_avg_sqs = cast(List[Tensor], device_max_exp_avg_sqs_)
                # Maintains the maximum of all 2nd moment running avg. till now
                torch._foreach_maximum_(device_max_exp_avg_sqs, device_exp_avg_sqs)

                # Use the max. for normalizing the running avg. of the gradient
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_max_exp_avg_sqs)
            else:
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_exp_avg_sqs)

            torch._foreach_div_(exp_avg_sq_sqrt, bias_correction2_sqrt)
            torch._foreach_add_(exp_avg_sq_sqrt, eps)
            torch._foreach_addcdiv_(
                device_params, device_exp_avgs, exp_avg_sq_sqrt, step_size
            )


def _fused_adamw(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_avg_sqs: List[Tensor],
    max_exp_avg_sqs: List[Tensor],
    state_steps: List[Tensor],
    grad_scale: Optional[Tensor],
    found_inf: Optional[Tensor],
    *,
    amsgrad: bool,
    beta1: float,
    beta2: float,
    lr: Union[Tensor, float],
    weight_decay: float,
    eps: float,
    maximize: bool,
    capturable: bool,  # Needed for consistency.
    differentiable: bool,
    has_complex: bool,  # Needed for consistency.
) -> None:
    if not params:
        return
    if differentiable:
        raise RuntimeError("Adam with fused=True does not support differentiable=True")

    grad_scale_dict: DeviceDict = (
        {grad_scale.device: grad_scale} if grad_scale is not None else {}
    )
    found_inf_dict: DeviceDict = (
        {found_inf.device: found_inf} if found_inf is not None else {}
    )

    # Only shuffle the lr across devices when it is a Tensor not on CPU;
    # otherwise treat it as a scalar.
    lr_dict: Optional[DeviceDict] = (
        {lr.device: lr} if isinstance(lr, Tensor) and str(lr.device) != "cpu" else None
    )

    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps]
    )
    for (device, _), (
        (
            device_params_,
            device_grads_,
            device_exp_avgs_,
            device_exp_avg_sqs_,
            device_max_exp_avg_sqs,
            device_state_steps_,
        ),
        _,
    ) in grouped_tensors.items():
        device_params = cast(List[Tensor], device_params_)
        device_grads = cast(List[Tensor], device_grads_)
        device_exp_avgs = cast(List[Tensor], device_exp_avgs_)
        device_exp_avg_sqs = cast(List[Tensor], device_exp_avg_sqs_)
        device_state_steps = cast(List[Tensor], device_state_steps_)

        if device.type == "mps":
            assert found_inf is None and grad_scale is None

        device_grad_scale, device_found_inf = None, None
        if grad_scale is not None:
            device_grad_scale = grad_scale_dict.setdefault(
                device, grad_scale.to(device, non_blocking=True)
            )
        if found_inf is not None:
            device_found_inf = found_inf_dict.setdefault(
                device, found_inf.to(device, non_blocking=True)
            )
        if lr_dict is not None and device not in lr_dict:
            lr = lr_dict.setdefault(
                device, lr.to(device=device, non_blocking=True)
            )
        torch._foreach_add_(device_state_steps, 1)
        torch._fused_adamw_(
            device_params,
            device_grads,
            device_exp_avgs,
            device_exp_avg_sqs,
            device_max_exp_avg_sqs,
            device_state_steps,
            amsgrad=amsgrad,
            lr=lr,
            beta1=beta1,
            beta2=beta2,
            weight_decay=weight_decay,
            eps=eps,
            maximize=maximize,
            grad_scale=device_grad_scale,
            found_inf=device_found_inf,
        )
        if device_found_inf is not None:
            torch._foreach_sub_(
                device_state_steps, [device_found_inf] * len(device_state_steps)
            )


@_disable_dynamo_if_unsupported(single_tensor_fn=_single_tensor_adamw)
def adamw(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_avg_sqs: List[Tensor],
    max_exp_avg_sqs: List[Tensor],
    state_steps: List[Tensor],
    # kwonly args with defaults are not supported by functions compiled with
    # torchscript; keep these as positional args with defaults for the functional API.
    foreach: Optional[bool] = None,
    capturable: bool = False,
    differentiable: bool = False,
    fused: Optional[bool] = None,
    grad_scale: Optional[Tensor] = None,
    found_inf: Optional[Tensor] = None,
    has_complex: bool = False,
    *,
    amsgrad: bool,
    beta1: float,
    beta2: float,
    lr: Union[float, Tensor],
    weight_decay: float,
    eps: float,
    maximize: bool,
):
    r"""Functional API that performs AdamW algorithm computation.

    See :class:`~torch.optim.AdamW` for details.
    """
    if not torch._utils.is_compiling() and not all(
        isinstance(t, torch.Tensor) for t in state_steps
    ):
        raise RuntimeError(
            "API has changed, `state_steps` argument must contain a list of singleton tensors"
        )
    # Respect user overrides for foreach/fused; only pick a default when neither is set.
    if fused is None and foreach is None:
        _, foreach = _default_to_fused_or_foreach(
            params, differentiable, use_fused=False
        )
        # Do not flip on foreach for the unsupported case of a Tensor lr with capturable=False.
        if foreach and isinstance(lr, Tensor) and not capturable:
            foreach = False
    if fused is None:
        fused = False
    if foreach is None:
        foreach = False

    if foreach and torch.jit.is_scripting():
        raise RuntimeError("torch.jit.script not supported with foreach optimizers")
    if fused and torch.jit.is_scripting():
        raise RuntimeError("torch.jit.script not supported with fused optimizers")

    if fused and not torch.jit.is_scripting():
        func = _fused_adamw
    elif foreach and not torch.jit.is_scripting():
        func = _multi_tensor_adamw
    else:
        func = _single_tensor_adamw

    func(
        params,
        grads,
        exp_avgs,
        exp_avg_sqs,
        max_exp_avg_sqs,
        state_steps,
        amsgrad=amsgrad,
        beta1=beta1,
        beta2=beta2,
        lr=lr,
        weight_decay=weight_decay,
        eps=eps,
        maximize=maximize,
        capturable=capturable,
        differentiable=differentiable,
        grad_scale=grad_scale,
        found_inf=found_inf,
        has_complex=has_complex,
    )