
from typing import List, Optional, Tuple, Union

import torch
from torch import Tensor
from .optimizer import (Optimizer, ParamsT, _use_grad_for_differentiable, _get_value,
                        _stack_if_compiling, _dispatch_sqrt, _default_to_fused_or_foreach,
                        _capturable_doc, _differentiable_doc, _foreach_doc, _fused_doc,
                        _maximize_doc, _view_as_real)
from torch.utils._foreach_utils import _get_fused_kernels_supported_devices

__all__ = ['Adam', 'adam']


class Adam(Optimizer):
    def __init__(self,
                 params: ParamsT,
                 lr: Union[float, Tensor] = 1e-3,
                 betas: Tuple[float, float] = (0.9, 0.999),
                 eps: float = 1e-8,
                 weight_decay: float = 0,
                 amsgrad: bool = False,
                 *,
                 foreach: Optional[bool] = None,
                 maximize: bool = False,
                 capturable: bool = False,
                 differentiable: bool = False,
                 fused: Optional[bool] = None):
        if not 0.0 <= lr:
            raise ValueError(f"Invalid learning rate: {lr}")
        if isinstance(lr, Tensor) and foreach and not capturable:
            raise ValueError("lr as a Tensor is not supported for capturable=False and foreach=True")
        if not 0.0 <= eps:
            raise ValueError(f"Invalid epsilon value: {eps}")
        if not 0.0 <= betas[0] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 0: {betas[0]}")
        if not 0.0 <= betas[1] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 1: {betas[1]}")
        if not 0.0 <= weight_decay:
            raise ValueError(f"Invalid weight_decay value: {weight_decay}")

        defaults = dict(lr=lr, betas=betas, eps=eps,
                        weight_decay=weight_decay, amsgrad=amsgrad,
                        maximize=maximize, foreach=foreach, capturable=capturable,
                        differentiable=differentiable, fused=fused)
        super().__init__(params, defaults)

        if fused:
            if differentiable:
                raise RuntimeError("`fused` does not support `differentiable`")
            self._step_supports_amp_scaling = True
            fused_supported_devices = _get_fused_kernels_supported_devices()
            if not all(
                p.device.type in fused_supported_devices and
                torch.is_floating_point(p)
                for pg in self.param_groups for p in pg['params']
            ):
                raise RuntimeError("`fused=True` requires all the params to be floating point "
                                   f"Tensors of supported devices: {fused_supported_devices}.")
            if foreach:
                raise RuntimeError("`fused` and `foreach` cannot be `True` together.")

    def __setstate__(self, state):
        super().__setstate__(state)
        for group in self.param_groups:
            group.setdefault('amsgrad', False)
            group.setdefault('maximize', False)
            group.setdefault('foreach', None)
            group.setdefault('capturable', False)
            group.setdefault('differentiable', False)
            group.setdefault('fused', None)
        state_values = list(self.state.values())
        step_is_tensor = (len(state_values) != 0) and torch.is_tensor(state_values[0]['step'])
        if not step_is_tensor:
            for s in state_values:
                s['step'] = torch.tensor(float(s['step']), dtype=torch.float32)

    def _init_group(
        self,
        group,
        params_with_grad,
        grads,
        exp_avgs,
        exp_avg_sqs,
        max_exp_avg_sqs,
        state_steps
    ):
        has_complex = False
        for p in group['params']:
            if p.grad is not None:
                has_complex |= torch.is_complex(p)
                params_with_grad.append(p)
                if p.grad.is_sparse:
                    raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead')
                grads.append(p.grad)

                state = self.state[p]
                # Lazy state initialization
                if len(state) == 0:
                    # Deliberately host `step` on CPU if both capturable and fused are off,
                    # since kernel launches are costly on CUDA and XLA.
                    state['step'] = (
                        torch.zeros((), dtype=torch.float32, device=p.device)
                        if group['capturable'] or group['fused']
                        else torch.tensor(0.)
                    )
                    # Exponential moving average of gradient values
                    state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    # Exponential moving average of squared gradient values
                    state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    if group['amsgrad']:
                        # Maintains max of all exp. moving avg. of sq. grad. values
                        state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)

                exp_avgs.append(state['exp_avg'])
                exp_avg_sqs.append(state['exp_avg_sq'])

                if group['amsgrad']:
                    max_exp_avg_sqs.append(state['max_exp_avg_sq'])
                if group['differentiable'] and state['step'].requires_grad:
                    raise RuntimeError('`requires_grad` is not supported for `step` in differentiable mode')

                # Foreach without capturable does not support a tensor lr
                if group['foreach'] and torch.is_tensor(group['lr']) and not group['capturable']:
                    raise RuntimeError('lr as a Tensor is not supported for capturable=False and foreach=True')

                state_steps.append(state['step'])
        return has_complex

    @_use_grad_for_differentiable
    def step(self, closure=None):
        """Perform a single optimization step.

        Args:
            closure (Callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        self._cuda_graph_capture_health_check()

        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()

        for group in self.param_groups:
            params_with_grad = []
            grads = []
            exp_avgs = []
            exp_avg_sqs = []
            max_exp_avg_sqs = []
            state_steps = []
            beta1, beta2 = group['betas']

            has_complex = self._init_group(
                group,
                params_with_grad,
                grads,
                exp_avgs,
                exp_avg_sqs,
                max_exp_avg_sqs,
                state_steps)

            adam(
                params_with_grad,
                grads,
                exp_avgs,
                exp_avg_sqs,
                max_exp_avg_sqs,
                state_steps,
                amsgrad=group['amsgrad'],
                has_complex=has_complex,
                beta1=beta1,
                beta2=beta2,
                lr=group['lr'],
                weight_decay=group['weight_decay'],
                eps=group['eps'],
                maximize=group['maximize'],
                foreach=group['foreach'],
                capturable=group['capturable'],
                differentiable=group['differentiable'],
                fused=group['fused'],
                grad_scale=getattr(self, "grad_scale", None),
                found_inf=getattr(self, "found_inf", None),
            )

        return loss


Adam.__doc__ = r"""Implements Adam algorithm.

    .. math::
       \begin{aligned}
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{input}      : \gamma \text{ (lr)}, \beta_1, \beta_2
                \text{ (betas)},\theta_0 \text{ (params)},f(\theta) \text{ (objective)}          \\
            &\hspace{13mm}      \lambda \text{ (weight decay)},  \: \textit{amsgrad},
                \:\textit{maximize}                                                              \\
            &\textbf{initialize} :  m_0 \leftarrow 0 \text{ ( first moment)},
                v_0\leftarrow 0 \text{ (second moment)},\: \widehat{v_0}^{max}\leftarrow 0\\[-1.ex]
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{for} \: t=1 \: \textbf{to} \: \ldots \: \textbf{do}                         \\

            &\hspace{5mm}\textbf{if} \: \textit{maximize}:                                       \\
            &\hspace{10mm}g_t           \leftarrow   -\nabla_{\theta} f_t (\theta_{t-1})         \\
            &\hspace{5mm}\textbf{else}                                                           \\
            &\hspace{10mm}g_t           \leftarrow   \nabla_{\theta} f_t (\theta_{t-1})          \\
            &\hspace{5mm}\textbf{if} \: \lambda \neq 0                                           \\
            &\hspace{10mm} g_t \leftarrow g_t + \lambda  \theta_{t-1}                            \\
            &\hspace{5mm}m_t           \leftarrow   \beta_1 m_{t-1} + (1 - \beta_1) g_t          \\
            &\hspace{5mm}v_t           \leftarrow   \beta_2 v_{t-1} + (1-\beta_2) g^2_t          \\
            &\hspace{5mm}\widehat{m_t} \leftarrow   m_t/\big(1-\beta_1^t \big)                   \\
            &\hspace{5mm}\widehat{v_t} \leftarrow   v_t/\big(1-\beta_2^t \big)                   \\
            &\hspace{5mm}\textbf{if} \: amsgrad                                                  \\
            &\hspace{10mm}\widehat{v_t}^{max} \leftarrow \mathrm{max}(\widehat{v_t}^{max},
                \widehat{v_t})                                                                   \\
            &\hspace{10mm}\theta_t \leftarrow \theta_{t-1} - \gamma \widehat{m_t}/
                \big(\sqrt{\widehat{v_t}^{max}} + \epsilon \big)                                 \\
            &\hspace{5mm}\textbf{else}                                                           \\
            &\hspace{10mm}\theta_t \leftarrow \theta_{t-1} - \gamma \widehat{m_t}/
                \big(\sqrt{\widehat{v_t}} + \epsilon \big)                                       \\
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
            &\bf{return} \:  \theta_t                                                     \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
       \end{aligned}

    For further details regarding the algorithm we refer to `Adam: A Method for Stochastic Optimization`_.
    """ + fr"""
    Args:
        params (iterable): iterable of parameters to optimize or dicts defining
            parameter groups
        lr (float, Tensor, optional): learning rate (default: 1e-3). A tensor LR
            is not yet supported for all our implementations. Please use a float
            LR if you are not also specifying fused=True or capturable=True.
        betas (Tuple[float, float], optional): coefficients used for computing
            running averages of gradient and its square (default: (0.9, 0.999))
        eps (float, optional): term added to the denominator to improve
            numerical stability (default: 1e-8)
        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
        amsgrad (bool, optional): whether to use the AMSGrad variant of this
            algorithm from the paper `On the Convergence of Adam and Beyond`_
            (default: False)
        {_foreach_doc}
        {_maximize_doc}
        {_capturable_doc}
        {_differentiable_doc}
        {_fused_doc}
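
    Example (a minimal usage sketch; ``model``, ``input``, ``target`` and ``loss_fn`` are
    placeholders assumed to be defined by the caller)::

        >>> # xdoctest: +SKIP
        >>> optimizer = torch.optim.Adam(model.parameters(), lr=0.001, betas=(0.9, 0.999))
        >>> optimizer.zero_grad()
        >>> loss_fn(model(input), target).backward()
        >>> optimizer.step()
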
    .. _Adam\: A Method for Stochastic Optimization:
        https://arxiv.org/abs/1412.6980
    .. _On the Convergence of Adam and Beyond:
        https://openreview.net/forum?id=ryQu7f-RZ

    """


def adam(params: List[Tensor],
         grads: List[Tensor],
         exp_avgs: List[Tensor],
         exp_avg_sqs: List[Tensor],
         max_exp_avg_sqs: List[Tensor],
         state_steps: List[Tensor],
         # kwonly args with defaults are not supported by functions compiled with torchscript issue #70627
         # setting this as kwarg for now as functional API is compiled by torch/distributed/optim
         foreach: Optional[bool] = None,
         capturable: bool = False,
         differentiable: bool = False,
         fused: Optional[bool] = None,
         grad_scale: Optional[Tensor] = None,
         found_inf: Optional[Tensor] = None,
         has_complex: bool = False,
         *,
         amsgrad: bool,
         beta1: float,
         beta2: float,
         lr: Union[float, Tensor],
         weight_decay: float,
         eps: float,
         maximize: bool):
    r"""Functional API that performs Adam algorithm computation.

    See :class:`~torch.optim.Adam` for details.
    """
    # Respect when the user inputs False/True for foreach or fused. We only want to
    # change the default when neither has been user-specified.
    if fused is None and foreach is None:
        _, foreach = _default_to_fused_or_foreach(params, differentiable, use_fused=False)
        # Do not flip on foreach for the unsupported case where lr is a Tensor and capturable=False.
        if foreach and isinstance(lr, Tensor) and not capturable:
            foreach = False
    if fused is None:
        fused = False
    if foreach is None:
        foreach = False

    if not torch._utils.is_compiling() and not all(isinstance(t, torch.Tensor) for t in state_steps):
        raise RuntimeError("API has changed, `state_steps` argument must contain a list of singleton tensors")

    if foreach and torch.jit.is_scripting():
        raise RuntimeError('torch.jit.script not supported with foreach optimizers')
    if fused and torch.jit.is_scripting():
        raise RuntimeError('torch.jit.script not supported with fused optimizers')

    if fused and not torch.jit.is_scripting():
        func = _fused_adam
    elif foreach and not torch.jit.is_scripting():
        func = _multi_tensor_adam
    else:
        func = _single_tensor_adam

    func(params,
         grads,
         exp_avgs,
         exp_avg_sqs,
         max_exp_avg_sqs,
         state_steps,
         amsgrad=amsgrad,
         has_complex=has_complex,
         beta1=beta1,
         beta2=beta2,
         lr=lr,
         weight_decay=weight_decay,
         eps=eps,
         maximize=maximize,
         capturable=capturable,
         differentiable=differentiable,
         grad_scale=grad_scale,
         found_inf=found_inf)


def _single_tensor_adam(params: List[Tensor],
                        grads: List[Tensor],
                        exp_avgs: List[Tensor],
                        exp_avg_sqs: List[Tensor],
                        max_exp_avg_sqs: List[Tensor],
                        state_steps: List[Tensor],
                        grad_scale: Optional[Tensor],
                        found_inf: Optional[Tensor],
                        *,
                        amsgrad: bool,
                        has_complex: bool,
                        beta1: float,
                        beta2: float,
                        lr: Union[float, Tensor],
                        weight_decay: float,
                        eps: float,
                        maximize: bool,
                        capturable: bool,
                        differentiable: bool):

    assert grad_scale is None and found_inf is None

    if torch.jit.is_scripting():
        # this assert is due to JIT being dumb and not realizing that the ops below
        # have overloads to handle both float and Tensor lrs, so we just assert it's
        # a float since most people using JIT are using floats
        assert isinstance(lr, float)

    for i, param in enumerate(params):
        grad = grads[i] if not maximize else -grads[i]
        exp_avg = exp_avgs[i]
        exp_avg_sq = exp_avg_sqs[i]
        step_t = state_steps[i]

        # If compiling, the compiler will handle cudagraph checks, see note [torch.compile x capturable]
        if not torch._utils.is_compiling() and capturable:
            assert (
                (param.is_cuda and step_t.is_cuda) or (param.is_xla and step_t.is_xla)
            ), "If capturable=True, params and state_steps must be CUDA or XLA tensors."

        # update step
        step_t += 1

        if weight_decay != 0:
            grad = grad.add(param, alpha=weight_decay)

        if torch.is_complex(param):
            grad = torch.view_as_real(grad)
            exp_avg = torch.view_as_real(exp_avg)
            exp_avg_sq = torch.view_as_real(exp_avg_sq)
            if amsgrad:
                max_exp_avg_sqs[i] = torch.view_as_real(max_exp_avg_sqs[i])
            param = torch.view_as_real(param)

        # Decay the first and second moment running average coefficient
        exp_avg.lerp_(grad, 1 - beta1)
        exp_avg_sq.mul_(beta2).addcmul_(grad, grad.conj(), value=1 - beta2)

        if capturable or differentiable:
            step = step_t

            bias_correction1 = 1 - beta1 ** step
            bias_correction2 = 1 - beta2 ** step

            step_size = lr / bias_correction1
            step_size_neg = step_size.neg()

            bias_correction2_sqrt = bias_correction2.sqrt()

            if amsgrad:
                # Maintains the maximum of all 2nd moment running avg. till now
                if differentiable:
                    max_exp_avg_sq = max_exp_avg_sqs[i].clone()
                else:
                    max_exp_avg_sq = max_exp_avg_sqs[i]

                max_exp_avg_sqs[i].copy_(torch.maximum(max_exp_avg_sq, exp_avg_sq))

                # Uses the max. for normalizing running avg. of gradient
                # Folds in (admittedly ugly) 1-elem step_size math here to avoid extra param-size tensor
                denom = (max_exp_avg_sqs[i].sqrt() / (bias_correction2_sqrt * step_size_neg)).add_(eps / step_size_neg)
            else:
                denom = (exp_avg_sq.sqrt() / (bias_correction2_sqrt * step_size_neg)).add_(eps / step_size_neg)

            param.addcdiv_(exp_avg, denom)
        else:
            step = _get_value(step_t)

            bias_correction1 = 1 - beta1 ** step
            bias_correction2 = 1 - beta2 ** step

            step_size = lr / bias_correction1

            bias_correction2_sqrt = _dispatch_sqrt(bias_correction2)

            if amsgrad:
                # Maintains the maximum of all 2nd moment running avg. till now
                torch.maximum(max_exp_avg_sqs[i], exp_avg_sq, out=max_exp_avg_sqs[i])

                # Use the max. for normalizing running avg. of gradient
                denom = (max_exp_avg_sqs[i].sqrt() / bias_correction2_sqrt).add_(eps)
            else:
                denom = (exp_avg_sq.sqrt() / bias_correction2_sqrt).add_(eps)

            param.addcdiv_(exp_avg, denom, value=-step_size)

        # Lastly, switch back to complex view
        if amsgrad and torch.is_complex(params[i]):
            max_exp_avg_sqs[i] = torch.view_as_complex(max_exp_avg_sqs[i])


def _multi_tensor_adam(params: List[Tensor],
                       grads: List[Tensor],
                       exp_avgs: List[Tensor],
                       exp_avg_sqs: List[Tensor],
                       max_exp_avg_sqs: List[Tensor],
                       state_steps: List[Tensor],
                       grad_scale: Optional[Tensor],
                       found_inf: Optional[Tensor],
                       *,
                       amsgrad: bool,
                       has_complex: bool,
                       beta1: float,
                       beta2: float,
                       lr: Union[float, Tensor],
                       weight_decay: float,
                       eps: float,
                       maximize: bool,
                       capturable: bool,
                       differentiable: bool):
    if len(params) == 0:
        return

    if isinstance(lr, Tensor) and not capturable:
        raise RuntimeError("lr as a Tensor is not supported for capturable=False and foreach=True")

    # If compiling, the compiler will handle cudagraph checks, see note [torch.compile x capturable]
    if not torch._utils.is_compiling() and capturable:
        assert all(p.is_cuda and step.is_cuda for p, step in zip(params, state_steps)), \
            "If capturable=True, params and state_steps must be CUDA tensors."

    assert grad_scale is None and found_inf is None

    assert not differentiable, "_foreach ops don't support autograd"

    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps])
    for ((device_params,
          device_grads,
          device_exp_avgs,
          device_exp_avg_sqs,
          device_max_exp_avg_sqs,
          device_state_steps), _) in grouped_tensors.values():

        if maximize:
            device_grads = torch._foreach_neg(device_grads)

        # Handle complex parameters
        if has_complex:
            if amsgrad:
                _view_as_real(device_params, device_grads, device_exp_avgs, device_exp_avg_sqs,
                              device_max_exp_avg_sqs)
            else:
                _view_as_real(device_params, device_grads, device_exp_avgs, device_exp_avg_sqs)

        # Update steps
        # If steps are on CPU, foreach will fall back to the slow path, which is a for-loop calling t.add(1)
        # over and over. 1 will then be wrapped into a Tensor over and over again, which is slower than if
        # we just wrapped it once now. The alpha is required to assure we go to the fused overload.
        if device_state_steps[0].is_cpu:
            torch._foreach_add_(device_state_steps, torch.tensor(1.0, device='cpu'), alpha=1.0)
        else:
            torch._foreach_add_(device_state_steps, 1)

        if weight_decay != 0:
            # Re-use the intermediate memory (device_grads) already allocated for maximize
            if maximize:
                torch._foreach_add_(device_grads, device_params, alpha=weight_decay)
            else:
                device_grads = torch._foreach_add(device_grads, device_params, alpha=weight_decay)

        # Decay the first and second moment running average coefficient
        torch._foreach_lerp_(device_exp_avgs, device_grads, 1 - beta1)

        torch._foreach_mul_(device_exp_avg_sqs, beta2)
        torch._foreach_addcmul_(device_exp_avg_sqs, device_grads, device_grads, 1 - beta2)

        # Delete the local intermediate since it won't be used anymore to save on peak memory
        del device_grads

        if capturable:
            bias_correction1 = torch._foreach_pow(beta1, device_state_steps)
            bias_correction2 = torch._foreach_pow(beta2, device_state_steps)
            # foreach_sub doesn't allow a scalar as the first arg
            torch._foreach_sub_(bias_correction1, 1)
            torch._foreach_sub_(bias_correction2, 1)
            # we do not negate bias_correction1 as it'll need to be negated later anyway
            torch._foreach_neg_(bias_correction2)

            # foreach_div doesn't allow a scalar as the first arg
            torch._foreach_div_(bias_correction1, lr)
            torch._foreach_reciprocal_(bias_correction1)

            torch._foreach_sqrt_(bias_correction2)

            # Re-assign for clarity as we maintain minimal intermediates:
            #   step_size = - lr / (1 - beta1 ^ t)
            #   bias_correction2_sqrt = sqrt(1 - beta2 ^ t)
            step_size = bias_correction1
            bias_correction2_sqrt = bias_correction2

            if amsgrad:
                # Maintains the maximum of all 2nd moment running avg. till now
                torch._foreach_maximum_(device_max_exp_avg_sqs, device_exp_avg_sqs)

                # Use the max. for normalizing running avg. of gradient
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_max_exp_avg_sqs)
            else:
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_exp_avg_sqs)

            torch._foreach_div_(exp_avg_sq_sqrt, bias_correction2_sqrt)
            torch._foreach_add_(exp_avg_sq_sqrt, eps)
            torch._foreach_div_(exp_avg_sq_sqrt, step_size)

            # at this point, exp_avg_sq_sqrt = - (1 - beta1^t) * [sqrt(exp_avg_sq / (1 - beta2^t)) + eps] / lr
            torch._foreach_addcdiv_(device_params, device_exp_avgs, exp_avg_sq_sqrt)
        else:
            bias_correction1 = [1 - beta1 ** _get_value(step) for step in device_state_steps]
            bias_correction2 = [1 - beta2 ** _get_value(step) for step in device_state_steps]

            step_size = _stack_if_compiling([(lr / bc) * -1 for bc in bias_correction1])

            bias_correction2_sqrt = [_dispatch_sqrt(bc) for bc in bias_correction2]

            if amsgrad:
                # Maintains the maximum of all 2nd moment running avg. till now
                torch._foreach_maximum_(device_max_exp_avg_sqs, device_exp_avg_sqs)

                # Use the max. for normalizing running avg. of gradient
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_max_exp_avg_sqs)
            else:
                exp_avg_sq_sqrt = torch._foreach_sqrt(device_exp_avg_sqs)

            torch._foreach_div_(exp_avg_sq_sqrt, bias_correction2_sqrt)
            torch._foreach_add_(exp_avg_sq_sqrt, eps)
            torch._foreach_addcdiv_(device_params, device_exp_avgs, exp_avg_sq_sqrt, step_size)


def _fused_adam(params: List[Tensor],
                grads: List[Tensor],
                exp_avgs: List[Tensor],
                exp_avg_sqs: List[Tensor],
                max_exp_avg_sqs: List[Tensor],
                state_steps: List[Tensor],
                grad_scale: Optional[Tensor],
                found_inf: Optional[Tensor],
                *,
                amsgrad: bool,
                has_complex: bool,  # Needed for consistency.
                beta1: float,
                beta2: float,
                lr: Union[float, Tensor],
                weight_decay: float,
                eps: float,
                maximize: bool,
                capturable: bool,  # Needed for consistency.
                differentiable: bool) -> None:
    if not params:
        return
    if differentiable:
        raise RuntimeError("Adam with fused=True does not support differentiable=True")

    grad_scale_dict = {grad_scale.device: grad_scale} if grad_scale is not None else None
    found_inf_dict = {found_inf.device: found_inf} if found_inf is not None else None

    # We only shuffle around the lr when it is a Tensor and on CUDA, otherwise, we prefer
    # treating it as a scalar.
    lr_dict = {lr.device: lr} if isinstance(lr, Tensor) and str(lr.device) != "cpu" else None

    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps])
    for (device, _), ((device_params,
                       device_grads,
                       device_exp_avgs,
                       device_exp_avg_sqs,
                       device_max_exp_avg_sqs,
                       device_state_steps), _) in grouped_tensors.items():
        device_grad_scale, device_found_inf = None, None
        if grad_scale is not None:
            if device not in grad_scale_dict:
                grad_scale_dict[device] = grad_scale.to(device, non_blocking=True)
            device_grad_scale = grad_scale_dict[device]
        if found_inf is not None:
            if device not in found_inf_dict:
                found_inf_dict[device] = found_inf.to(device, non_blocking=True)
            device_found_inf = found_inf_dict[device]
        if lr_dict is not None and device not in lr_dict:
            lr_dict[device] = lr.to(device=device, non_blocking=True)
            lr = lr_dict[device]
        torch._foreach_add_(device_state_steps, 1)
        torch._fused_adam_(
            device_params,
            device_grads,
            device_exp_avgs,
            device_exp_avg_sqs,
            device_max_exp_avg_sqs,
            device_state_steps,
            amsgrad=amsgrad,
            lr=lr,
            beta1=beta1,
            beta2=beta2,
            weight_decay=weight_decay,
            eps=eps,
            maximize=maximize,
            grad_scale=device_grad_scale,
            found_inf=device_found_inf,
        )
        if device_found_inf is not None:
            torch._foreach_sub_(device_state_steps, [device_found_inf] * len(device_state_steps))