
    ¯wgF             
       L   U d dl Z d dlZd dlZd dlZd dlZd dlmZ d dl d dlZd dlm	Z	 d dl
Z
d dlmc mZ d dlmZ d dlmZ d dlmZmZ d dlmZ g d	Zd
Zdaee   ed<   e j<                  dee   fd       Zde e!df   de e
jD                  df   fdZ#de$e!   ddfdZ%dPdZ& G d d      Z'd Z(de e)e*   e)e
jD                     f   fdZ+dddQdZ,dPdZ- G d de
j\                  j^                        Z0d Z1e
jd                  de1eddd ee   d!e3g e e4e4f   f   d"e5d#efd$       Z6dRd%Z7d& Z8d'a9e j<                  d(efd)       Z: G d* d+      Z; G d, d-      Z< G d. d/e
j\                  j^                        Z= G d0 d1      Z>d2Z? G d3 d4e@      ZAde e3g e!f   e3eAgdf   f   fd5ZBd6e
jD                  deCe5e!f   fd7ZDeeDd8d9 iZEeCe5e3e
jD                  ge!f   f   ed:<    G d; d<eF      ZG G d= d>e
j\                  j                  j                        ZJ G d? d@e
j\                  j                  j                        ZKdA ZL G dB dC      ZMdD ZN G dE dF      ZO G dG dHej                        ZQdI ZRe
j                  j                  j                  j                  e
j                  j                  j                  j                  h eYe
j                  j                  j                  j                        z  Z^ G dJ dKe      Z_ G dL dMe      Z`dSdNZad'e1edfd!e3g e e4e4f   f   d"e5d#efdOZby)T    N)defaultdict)*)ReferenceType)is_fun)tree_map)capture_logsLoggingTensorMode)TorchDispatchMode)
checkpointcheckpoint_sequentialCheckpointErrorCheckpointFunctioncheck_backward_validitydetach_variableget_device_statesset_device_statesnoop_context_fnset_checkpoint_early_stopDefaultDeviceTypeset_checkpoint_debug_enabledCheckpointPolicySelectiveCheckpointContext$create_selective_checkpoint_contextsSAC_IGNORED_OPSdefault_checkpoint_debug_enabledenabledc              #   8   K   	 t         }| a d |a y# a w xY ww)a  
    Context manager that sets whether checkpoint should print additional debug
    information when running. See the ``debug`` flag for
    :func:`~torch.utils.checkpoint.checkpoint` for more information. Note that
    when set, this context manager overrides the value of ``debug`` passed to
    checkpoint. To defer to the local setting, pass ``None`` to this context.

    Args:
        enabled (bool): Whether checkpoint should print debug information.
            Default is 'None'.
    N)r   )r   prevs     [/home/mcse/projects/flask/flask-venv/lib/python3.12/site-packages/torch/utils/checkpoint.pyr   r   ,   s%     )($+!$(!D!    inputs.returnc                 D   t        | t              rrg }| D ]`  }t        |t        j                        s|j	                  |       /|j                         }|j                  |_        |j	                  |       b t        |      S t        dt        |       j                        )Nz@Only tuple of tensors is supported. Got Unsupported input type: )

isinstancetupletorchTensorappenddetachrequires_gradRuntimeErrortype__name__)r"   outinpxs       r    r   r   B   s    &%  	Cc5<<0

3

A!//AOJJqM	 SzNL!!
 	
    c                 T    t        d | D              st        j                  d       y y )Nc              3   j   K   | ]+  }t        |t        j                        s|j                   - y wN)r%   r'   r(   r+   .0r0   s     r    	<genexpr>z*check_backward_validity.<locals>.<genexpr>V   s#     TSjell6Ss  Ts   33zBNone of the inputs have requires_grad=True. Gradients will be None)anywarningswarn)r"   s    r    r   r   U   s%    TFTTP	
 Ur2   c                 Z    | dk(  rt        j                  d      S t        t         |       }|S )Nmeta)r'   devicegetattr)r>   device_modules     r    _get_device_modulerA   \   s+    ||F##E6*Mr2   c                   B    e Zd ZdZdZeddefd       Zedefd       Zy)	r   aA  
    A class that manages the default device type for checkpointing.

    If no non-CPU tensors are present, the default device type will
    be used. The default value is 'cuda'. The device type is used in
    the checkpointing process when determining which device states
    to save and restore for recomputation.
    cudar>   c                     | t         _        y)z
        Set the default device type for checkpointing.

        Args:
            device (str): The device type to be set as default. Default is 'cuda'.
        Nr   _default_device_type)r>   s    r    set_device_typez!DefaultDeviceType.set_device_typeo   s     28.r2   r#   c                  "    t         j                  S )z
        Get the current default device type for checkpointing.

        Returns:
            str: The current default device type.
        rE    r2   r    get_device_typez!DefaultDeviceType.get_device_typey   s     !555r2   NrC   )	r.   
__module____qualname____doc__rF   staticmethodstrrG   rJ   rI   r2   r    r   r   c   sC     "8 8 8 6S 6 6r2   r   c                     g fd}t        ||        t              }t        |      dkD  r't        j                  dt        |       dd           t              dk(  rt        j                         S d|v ryd   S )Nc                     t        | t        j                        r@| j                  j                  dk(  s&j                  | j                  j                         y y y )Ncpu)r%   r'   r(   r>   r-   r)   )argdevice_typess    r    add_device_typesz,_infer_device_type.<locals>.add_device_types   s?    c5<<(E1I

0 2J(r2      a  Tensor arguments, excluding CPU tensors, are detected on at least two types of devices. Device state will only be saved for devices of a single device type, and the remaining devices will be ignored. Consequently, if any checkpointed functions involve randomness, this may result in incorrect gradients. (Note that if CUDA devices are among the devices detected, it will be prioritized; otherwise, the first device encountered will be selected.)
Device types: z first device type: r   rC   )r   setlenr:   r;   sortedr   rJ   )argsrV   device_types_setrU   s      @r    _infer_device_typer]      s    L1 t$<(
q 
  &&6788L\Z[_L]_	
 <A 0022	#	#Ar2   c                      g fd}t        ||        g }t        t        |        }D ];  }|j                  |      5  |j	                  |j                                d d d        = |fS # 1 sw Y   KxY w)Nc                     t        | t        j                        r9| j                  j                  dvr j                  | j                                y y y )N>   rS   r=   )r%   r'   r(   r>   r-   r)   
get_device)rT   fwd_device_idss    r    add_device_idsz)get_device_states.<locals>.add_device_ids   s>    c5<<(SZZ__O-S!!#.."23 .T(r2   )r   rA   r]   r>   r)   get_rng_state)r[   rb   fwd_device_statesr@   	device_idra   s        @r    r   r      s     N4 ^T"&'94'@AM# D	!!), 	D$$]%@%@%BC	D 	DD ,,,	D 	Ds    A,,A5	device_typec                    |t         j                         }|dk(  ryt        |      }t        | |      D ]0  \  }}|j	                  |      5  |j                  |       ddd       2 y# 1 sw Y   =xY w)a  Sets random number generator states for the specified devices.

    Args:
        devices: Device ids to set states for.
        states: States to set.
        device_type: ``device_type`` of the devices to set states for. Default
            is the device returned by a call to ``DefaultDeviceType.get_device_type()``,
            which is ``cuda`` if not changed by calling ``DefaultDeviceType::set_device_type()``.
    Nr=   )r   rJ   rA   zipr>   set_rng_state)devicesstatesrg   r@   r>   states         r    r   r      s}     '779f&{3MWf- /!!&) 	/''.	/ 	//	/ 	/s   A((A1	c                 F   t         j                  j                  |       r?t        j                  |       t        j                  |       t        j
                         d}nd }t        j                  d      t        j                  d      t        j
                         d}||fS )N)r   dtypecache_enabledrS   )r'   ampis_autocast_availableis_autocast_enabledget_autocast_dtypeis_autocast_cache_enabled)rg   device_autocast_kwargscpu_autocast_kwargss      r    _get_autocast_kwargsrx      s    yy&&{300=--k:"<<>"
 "& ,,U3))%088: "#666r2   c                   ,    e Zd Zed        Zed        Zy)r   c                    t        |       || _        || _        t        | | _        t        | j                        \  | _        | _        |r^t        j                         | _
        d| _        t        | j                        }t        |dd      rd| _        t        | \  | _        | _        g | _        g | _        g }t'        |      D ]}  \  }}t        j(                  |      rH|j+                  |       | j$                  j+                  |       | j"                  j+                  d        c| j"                  j+                  |         | j,                  |  t        j.                         5   || }d d d        |S # 1 sw Y   S xY w)NF_initializedT)r   run_functionpreserve_rng_stater]   rg   rx   rv   rw   r'   rc   fwd_cpu_statehad_device_in_fwdrA   r?   r   fwd_devicesrd   r"   tensor_indices	enumerate	is_tensorr)   save_for_backwardno_grad)	ctxr|   r}   r[   r@   tensor_inputsirT   outputss	            r    forwardzCheckpointFunction.forward   sV   %'!3,d3>ROO?
;"C$;  % 3 3 5C
 %*C!.s?M}ne<(,%9JD9Q6!6 
o 	'FAss#$$S)""))!,

!!$'

!!#&	' 	}-]]_ 	*"D)G	*	*s   'E77Fc           	      n   t         j                  j                         st        d      t	        | j
                        }| j                  }| j                  }t        |      D ]  \  }}||   ||<    g }| j                  r| j                  r| j                  }t         j                  j                  || j                  | j                        5  | j                  rWt        j                  | j                          | j                  r,t#        | j                  | j$                  | j                         t'        t)        |            }t         j*                  j-                  | j                        r5t        j*                  j.                  d	d| j                  i| j0                  nt3        j4                         }	t        j6                         5  |	5  t        j*                  j.                  d
i | j8                  5   | j:                  | }
d d d        d d d        d d d        d d d        t=        
t         j>                        r|
f}
g }g }tA        tC        |
            D ]S  }t        jD                  |
|         s|
|   jF                  s,|jI                  |
|          |jI                  ||          U tC        |      dk(  rt        d      t         j                  jK                  ||       t)        d D              }d|z   S # 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   xY w)NzWhen use_reentrant=True, torch.utils.checkpoint is incompatible with .grad() or passing an `inputs` parameter to .backward(). To resolve this error, you can either set use_reentrant=False, or call .backward() without passing the `inputs` argument.rk   r   rg   rf   rg   r   zInone of output has requires_grad=True, this checkpoint() is not necessaryc              3   l   K   | ],  }t        |t        j                        r|j                  nd  . y wr5   )r%   r'   r(   gradr6   s     r    r8   z.CheckpointFunction.backward.<locals>.<genexpr>B  s.      
 #35CHH4?
s   24)NNrI   rS   )&r'   autograd_is_checkpoint_validr,   listr"   r   saved_tensorsr   r}   r   r   randomfork_rngrg   rj   r~   r   rd   r   r&   rq   rr   autocastrv   
contextlibnullcontextenable_gradrw   r|   r%   r(   rangerY   r   r+   r)   backward)r   r[   r"   r   tensorsr   idxrng_devicesdetached_inputsdevice_autocast_ctxr   outputs_with_gradargs_with_gradgradss                 r    r   zCheckpointFunction.backward  s   ~~224N  cjj!++##  / 	%FAs!!*F3K	% !!c&;&;//K\\"")?)?S__ # 
 	= %%##C$5$56((%coos7L7LZ]ZiZij-eFm<O 00A #())"4"4 #OO#/2/I/I#GQG]G]G_   ""$ =&9 =599;M;M;oWZWnWn;o =*#**O<= = =	= gu||,jG s7|$ 	/Awqz*wqz/G/G!((4%%d1g.	/  !Q&6  	 1>B 
&
 

 e##1= = = = = =	= 	=sU   C>L*L*L9L		LLL*L	LLLL'	"L**L4N)r.   rL   rM   rO   r   r   rI   r2   r    r   r      s)    & &P ;$ ;$r2   r   c                  R    t        j                         t        j                         fS r5   )r   r   rI   r2   r    r   r   J  s    !!#Z%;%;%===r2   F)use_reentrant
context_fndeterminism_checkdebugr   r   r   r   c                   |t        j                  dd       d}|j                  dd      }|r&|r$t        ddj	                  d	 |D              z         |r.|t
        us|d
urt        d      t        j                  | |g| S t        | ||||g|i |}t        |        | |i |}		 t        |       y# t        $ r |	cY S w xY w)a_  Checkpoint a model or part of the model.

    Activation checkpointing is a technique that trades compute for memory.
    Instead of keeping tensors needed for backward alive until they are used in
    gradient computation during backward, forward computation in checkpointed
    regions omits saving tensors for backward and recomputes them during the
    backward pass. Activation checkpointing can be applied to any part of a
    model.

    There are currently two checkpointing implementations available, determined
    by the :attr:`use_reentrant` parameter. It is recommended that you use
    ``use_reentrant=False``. Please refer the note below for a discussion of
    their differences.

    .. warning::

        If the :attr:`function` invocation during the backward pass differs
        from the forward pass, e.g., due to a global variable, the checkpointed
        version may not be equivalent, potentially causing an
        error being raised or leading to silently incorrect gradients.

    .. warning::

        The ``use_reentrant`` parameter should be passed explicitly. In version
        2.4 we will raise an exception if ``use_reentrant`` is not passed.
        If you are using the ``use_reentrant=True`` variant, please refer to the
        note below for important considerations and potential limitations.

    .. note::

        The reentrant variant of checkpoint (``use_reentrant=True``) and
        the non-reentrant variant of checkpoint (``use_reentrant=False``)
        differ in the following ways:

        * Non-reentrant checkpoint stops recomputation as soon as all needed
          intermediate activations have been recomputed. This feature is enabled
          by default, but can be disabled with :func:`set_checkpoint_early_stop`.
          Reentrant checkpoint always recomputes :attr:`function` in its
          entirety during the backward pass.

        * The reentrant variant does not record the autograd graph during the
          forward pass, as it runs with the forward pass under
          :func:`torch.no_grad`. The non-reentrant version does record the
          autograd graph, allowing one to perform backward on the graph within
          checkpointed regions.

        * The reentrant checkpoint only supports the
          :func:`torch.autograd.backward` API for the backward pass without its
          `inputs` argument, while the non-reentrant version supports all ways
          of performing the backward pass.

        * At least one input and output must have ``requires_grad=True`` for the
          reentrant variant. If this condition is unmet, the checkpointed part
          of the model will not have gradients. The non-reentrant version does
          not have this requirement.

        * The reentrant version does not consider tensors in nested structures
          (e.g., custom objects, lists, dicts, etc) as participating in
          autograd, while the non-reentrant version does.

        * The reentrant checkpoint does not support checkpointed regions with
          detached tensors from the computational graph, whereas the
          non-reentrant version does. For the reentrant variant, if the
          checkpointed segment contains tensors detached using ``detach()`` or
          with :func:`torch.no_grad`, the backward pass will raise an error.
          This is because ``checkpoint`` makes all the outputs require gradients
          and this causes issues when a tensor is defined to have no gradient in
          the model. To avoid this, detach the tensors outside of the
          ``checkpoint`` function.

    Args:
        function: describes what to run in the forward pass of the model or
            part of the model. It should also know how to handle the inputs
            passed as the tuple. For example, in LSTM, if user passes
            ``(activation, hidden)``, :attr:`function` should correctly use the
            first input as ``activation`` and the second input as ``hidden``
        preserve_rng_state(bool, optional):  Omit stashing and restoring
            the RNG state during each checkpoint. Note that under torch.compile,
            this flag doesn't take effect and we always preserve RNG state.
            Default: ``True``
        use_reentrant(bool):
            specify whether to use the activation checkpoint variant that
            requires reentrant autograd. This parameter should be passed
            explicitly. In version 2.5 we will raise an exception if
            ``use_reentrant`` is not passed. If ``use_reentrant=False``,
            ``checkpoint`` will use an implementation that does not require
            reentrant autograd. This allows ``checkpoint`` to support additional
            functionality, such as working as expected with
            ``torch.autograd.grad`` and support for keyword arguments input into
            the checkpointed function.
        context_fn(Callable, optional): A callable returning a tuple of two
            context managers. The function and its recomputation will be run
            under the first and second context managers respectively.
            This argument is only supported if ``use_reentrant=False``.
        determinism_check(str, optional): A string specifying the determinism
            check to perform. By default it is set to ``"default"`` which
            compares the shapes, dtypes, and devices of the recomputed tensors
            against those the saved tensors. To turn off this check, specify
            ``"none"``. Currently these are the only two supported values.
            Please open an issue if you would like to see more determinism
            checks. This argument is only supported if ``use_reentrant=False``,
            if ``use_reentrant=True``, the determinism check is always disabled.
        debug(bool, optional): If ``True``, error messages will also include
            a trace of the operators ran during the original forward computation
            as well as the recomputation. This argument is only supported if
            ``use_reentrant=False``.
        args: tuple containing inputs to the :attr:`function`

    Returns:
        Output of running :attr:`function` on :attr:`*args`
    Nae  torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.   )
stacklevelTr}   Unexpected keyword arguments: ,c              3       K   | ]  }|  y wr5   rI   r7   rT   s     r    r8   zcheckpoint.<locals>.<genexpr>       7N7N   FzKPassing `context_fn` or `debug` is only supported when use_reentrant=False.)r:   r;   pop
ValueErrorjoinr   r   apply'_checkpoint_without_reentrant_generatornextStopIteration)
functionr   r   r   r   r[   kwargspreservegenrets
             r    r   r   W  s   r C 	
  zz.5H-,sxx7Nv7N/NN
 	
 _,U0B'  "''(BTBB5h
,=u
GK
OU
 	S	''	I 	J	s   -B9 9CCc                    |t        j                  d       d}|j                  dd      }|r$t        ddj	                  d |D              z         d }t        | t        j                  j                        rt        | j                               } t        |       |z  }d}t        d	||d
z
  z  |      D ]!  }	|	|z   d
z
  }t         ||	||       |||      }#   ||d
z   t        |       d
z
  |       |      S )a	  Checkpoint a sequential model to save memory.

    Sequential models execute a list of modules/functions in order
    (sequentially). Therefore, we can divide such a model in various segments
    and checkpoint each segment. All segments except the last will not store
    the intermediate activations. The inputs of each checkpointed segment will
    be saved for re-running the segment in the backward pass.

    .. warning::
        The ``use_reentrant`` parameter should be passed explicitly. In version
        2.4 we will raise an exception if ``use_reentrant`` is not passed.
        If you are using the ``use_reentrant=True` variant, please see
        :func:`~torch.utils.checkpoint.checkpoint` for
        the important considerations and limitations of this variant. It is
        recommended that you use ``use_reentrant=False``.

    .. warning:
        Since PyTorch 1.4, it allows only one Tensor as the input and
        intermediate outputs, just like :class:`torch.nn.Sequential`.

    Args:
        functions: A :class:`torch.nn.Sequential` or the list of modules or
            functions (comprising the model) to run sequentially.
        segments: Number of chunks to create in the model
        input: A Tensor that is input to :attr:`functions`
        preserve_rng_state(bool, optional):  Omit stashing and restoring
            the RNG state during each checkpoint.
            Default: ``True``
        use_reentrant(bool):
            specify whether to use the activation checkpoint variant that
            requires reentrant autograd. This parameter should be passed
            explicitly. In version 2.5 we will raise an exception if
            ``use_reentrant`` is not passed. If ``use_reentrant=False``,
            ``checkpoint`` will use an implementation that does not require
            reentrant autograd. This allows ``checkpoint`` to support additional
            functionality, such as working as expected with
            ``torch.autograd.grad`` and support for keyword arguments input into
            the checkpointed function.

    Returns:
        Output of running :attr:`functions` sequentially on :attr:`*inputs`

    Example:
        >>> # xdoctest: +SKIP("stub")
        >>> model = nn.Sequential(...)
        >>> input_var = checkpoint_sequential(model, chunks, input_var)
    a{  torch.utils.checkpoint.checkpoint_sequential: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.Tr}   r   r   c              3       K   | ]  }|  y wr5   rI   r   s     r    r8   z(checkpoint_sequential.<locals>.<genexpr>8  r   r   c                       fd}|S )Nc                 F    t        dz         D ]  } |   |       }  | S NrW   )r   )inputjend	functionsstarts     r    r   z<checkpoint_sequential.<locals>.run_function.<locals>.forward<  s2    5#'* ,$	!U+,Lr2   rI   )r   r   r   r   s   ``` r    r|   z+checkpoint_sequential.<locals>.run_function;  s    	
 r2   r   rW   )r   r}   )r:   r;   r   r   r   r%   r'   nn
Sequentialr   childrenrY   r   r   )
r   segmentsr   r   r   r   r|   segment_sizer   r   s
             r    r   r     s   ` C	
  zz.5H,sxx7Nv7N/NN
 	
 )UXX001++-.	y>X-L
Cq,(Q,7F 
l"Q&Y/''	

 @<aY!!3Y?FFr2   c                     | st        d      y )NzqSomething went unexpectedly wrong in activation checkpoint. Please report this bug by filing an issue to PyTorch.AssertionError)conds    r    _internal_assertr   T  s    D
 	
 r2   Tenablec              #   8   K   	 t         }| a d |a y# a w xY ww)a)  Context manager that sets whether checkpoint should stop recomputation early.

    By default, non-reentrant checkpoint stops recomputation as soon as it
    has computed all needed Tensors. This context manager can be used to disable
    that feature if it is problematic for your specific application.

    This context manager only needs to be active when forward is run. It does
    not need to be active during backward.

    Example::

    >>> # xdoctest: +SKIP(failing)
    >>> message = "saved tensors default hooks are disabled"
    >>> with set_checkpoint_early_stop(False):
    ...     # Any checkpoint under this context manager will respect this
    ...     # context manager, even if its backward is performed outside.
    ...     out = checkpoint(fn, inputs)
    ...
    >>> out.backward()
    N)_enable_checkpoint_early_stop)r   r   s     r    r   r     s%     .-,(.%(,%%r!   c                       e Zd Zy)_HandleNr.   rL   rM   rI   r2   r    r   r         r2   r   c                       e Zd Zd Zy)_Holderc                     i | _         y r5   )handlesselfs    r    __init__z_Holder.__init__  s	    57r2   N)r.   rL   rM   r   rI   r2   r    r   r     s    8r2   r   c            	       X    e Zd Zed        Zededeedf   deddfd       Zed	        Zy)
_NoopSaveInputsc                  ,    t        j                  d      S )Nr   )r'   empty)r[   s    r    r   z_NoopSaveInputs.forward  s    {{4  r2   r   r"   .outputr#   Nc           
        	
 t        t        |      D cg c]$  \  }}t        |t        j                        s!||f& c}} \  }t              D ci c]  \  }}||
 c}}
|D cg c]   }t        |t        j                        rd n|" c}		
fd}|| _         | j                  |  y c c}}w c c}}w c c}w )Nc                 l    t              D cg c]  \  }}|v r| |      n| }}}|dd  S c c}}w r   )r   )r   r   or   r[   idx2saved_idxr   s       r    get_argsz/_NoopSaveInputs.setup_context.<locals>.get_args  sR     &dOAq 453FmA./AMC  qr7Ns   0)ri   r   r%   r'   r(   r   r   )r   r"   r   r   r   r   abr   r[   r   r   s            @@@r    setup_contextz_NoopSaveInputs.setup_context  s     #&!*6!2RAjELL6Qq!fR#
 +4N*CD$!QADDJKq
1ell3:K	  w') SDKs   "B1
B1
B7+%B=c                     t        d      )Nz(Did not expect to backward on this graphr   )r   grad_outputss     r    r   z_NoopSaveInputs.backward*  s    GHHr2   )	r.   rL   rM   rO   r   AnyTupler   r   rI   r2   r    r   r   
  sb    ! ! (3 (c3h ( ( ( (4 I Ir2   r   c                       e Zd Zd Zd Zy)_CheckpointFramec                    || _         d | _        g | _        t        t        j
                        | _        t        t              | _        t        t              | _
        || _        || _        || _        g | _        d| _        d| _        y NF)recompute_fninput_saverweak_holdersr   weakrefWeakKeyDictionary
recomputedintrecomp_counterboolis_recomputed
early_stopmetadata_fnunpack_error_cbx_metadatasforward_completedignore_saved_mismatch)r   r   r  r  r  s        r    r   z_CheckpointFrame.__init__0  s    (13 112 	
 6A5E5@5F % '.!&%*"r2   c           	         | j                   ry t        | j                        | j                  |   k(  s1t	        dt        | j                         d| j                  |          g }t        | j                        D ]  \  }} |       }|t        ||j                  v        t        |j                  |   d u       t        |j                  |   | j                  |   v        | j                  |   }| j                  |   |j                  |      }|| j                  |      k7  s|j                  ||| j                  |      f        t        |      dkD  r*d}|D ]  \  }}}	|d| d| d|	 dz  } t	        d	|       y )
Nztorch.utils.checkpoint: A different number of tensors was saved during the original forward and recomputation.
Number of tensors saved during forward: z/
Number of tensors saved during recomputation: r    ztensor at position z:
saved metadata: z
recomputed metadata: 
zztorch.utils.checkpoint: Recomputed values for the following tensors have different metadata than during the forward pass.
)r  rY   r   r   r   r   r   r   r   r  r  r)   )
r   gidnb_meta_differentr   weak_holderholderx_metarecomputed_xmismatched_tensorsrecomputed_metas
             r    check_recomputed_tensors_matchz/_CheckpointFrame.check_recomputed_tensors_matchI  s   %%  4$$%)<)<S)AA ";;>t?P?P;Q:R SAAEATATUXAY@Z\   )$*;*; < 	XC ]F~
 SFNN23 V^^C0<=V^^C0DOOC4HHI%%c*F??3/s0CDL)),77!((#vt7G7G7U)VW%	X(  !A%!#0A ,V_")# /''-h /,,;+<B@" "J%&(  &r2   N)r.   rL   rM   r   r  rI   r2   r    r   r   /  s    +2Br2   r   a
   An error happened while unpacking tensors; dumping logs of latest computation
because you passed `debug=True` to `torch.utils.checkpoint.checkpoint()`.
Scroll all the way down for guidance on how to navigate these logs.

+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+
|        1. Stack traces of the operators that ran in the original forward     |
+------------------------------------------------------------------------------+

{forward_traces}
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+
|        2. Stack traces of the operators that ran during recomputation        |
+------------------------------------------------------------------------------+

{recompute_traces}
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+
|       3. Log of operators in the original forward and recomputation          |
+------------------------------------------------------------------------------+
(Scroll up to correlate stack traces with each operation listed below. This
 helps identify their source in the code.)

IMPORTANT: Differences in "detach" calls between the original forward and the
           recomputation are expected. They are introduced by the checkpointing
           mechanism and can be ignored.

Operations executed during the original forward:

{forward_ops}

Operations executed during recomputation:

{recompute_ops}

+------------------------------------------------------------------------------+
 ERROR: Detected non-determinism while running activation checkpointing

 You are seeing this error because you passed `debug=True` to checkpoint and
 tensors to be saved during the original forward and differ between those saved
 during recomputation. This can happen if different operators were ran in the
 original forward and in the recomputation.

 To identify where the mismatch may be coming from, you can do the following:

 1) Compare the operators ran during original forward and recomputation to
    see where they differ. These operators are printed above in the order they
    were executed.

 2) Review the stack trace for each operator to locate its invocation source.
    Each operator's stack trace is printed in their execution order.

 Note that the logs can be quite long. Here's how they are structured:
 (Tip: you can Ctrl-f for these headers)

 1. Stack traces of the operators that ran in the original forward
 2. Stack traces of the operators that ran during recomputation
 3. Log of operators in the original forward and recomputation
 4. Error message                                             <--- You are here
--------------------------------------------------------------------------------
c                       e Zd Zy)r   Nr   rI   r2   r    r   r     r   r2   r   c                      t        j                         dk(  xr t        j                         dk(   G fdd      }  |         |        dt        ffd}fd}||fS )Nx86_64Linuxc                       e Zd Zd Z fdZy)._get_debug_context_and_cb.<locals>.CaptureLogsc                      d | _         d | _        y r5   )logstbsr   s    r    r   z7_get_debug_context_and_cb.<locals>.CaptureLogs.__init__  s    DIDHr2   c                 F     t         j                   fd       } |       S )Nc               3      K   t               5  t        ddd      5 } | \  _        _        |  d d d        d d d        y # 1 sw Y   xY w# 1 sw Y   y xY ww)NT)	python_tb	script_tbcpp_tb)r	   r   r  r  )logs_and_tbr   r   s    r    logging_modezX_get_debug_context_and_cb.<locals>.CaptureLogs.get_context_manager.<locals>.logging_mode  s[     &( &!$$$vV&Ze*5'DItx%%& & & & & &s1   AAAA	AA	AAA)r   contextmanager)r   r"  r   s   ` r    get_context_managerzB_get_debug_context_and_cb.<locals>.CaptureLogs.get_context_manager  s%    &&& '&
  >!r2   N)r.   rL   rM   r   r$  )r   s   r    CaptureLogsr    s    		"r2   r%  ec           
         d }j                   J j                   J t        t        j                   |d       |d      dj	                  j                         dj	                  j                                     | )Nc           
      2   d}t        |j                        }t        t        |j                  |j                              D ]S  \  }\  }}|| d|dz    d| d|  dz  }d}|D ]+  }|d   d	k(  }	|s|	s|	rd
}||d    d|d    d|d    dz  }- |dz  }U |S )Nr  z   (rW   z of z in z)

Fname__torch_dispatch__Tfilename:liner	  z

)rY   r  r   ri   r  )
labelr   r/   	total_lenr   logtbfound_torch_dispatchr-  is_torch_dispatchs
             r    
get_str_tbzF_get_debug_context_and_cb.<locals>.unpack_error_cb.<locals>.get_str_tb  s    CL--.I )#l.?.?AQAQ*R S 9C#d1q5'i[UG5II',$ QD(,V8L(L%/8I */3, d:./qfaV~RPPCQ v Jr2   original	recomputer	  )forward_tracesrecompute_tracesforward_opsrecompute_ops)r  r   _checkpoint_error_templateformatr   )r&  r4  capture_logs_fwdcapture_logs_recomputes     r    r  z2_get_debug_context_and_cb.<locals>.unpack_error_cb  s    	"  $$000%**666&--)*6FG!+K9O!P II&6&;&;<"ii(>(C(CD	 . 
 	r2   c                  D     j                         j                         fS r5   )r$  )r=  r>  s   r    r   z-_get_debug_context_and_cb.<locals>.context_fn  s!    3357M7a7a7cccr2   )platformmachinesystemr   )r%  r  r   r=  r>  r   s      @@@r    _get_debug_context_and_cbrC    sd     8+L0AW0LF" " #}(]? :d &&r2   r1   c                 J    | j                   | j                  | j                  dS )Nshapero   r>   rE  r1   s    r    _default_meta_extractorrH  	  s#     (( r2   nonec                      y r5   rI   )_s    r    <lambda>rL    s    r2   "_allowed_determinism_checks_to_fnsc                       e Zd Zy)_StopRecomputationErrorNr   rI   r2   r    rO  rO    r   r2   rO  c                   (     e Zd Zdedef fdZ xZS )_recomputation_hooktarget_frame_refr
  c                 <    fd}d }t         |   ||       y )Nc                    | j                   r| j                         n| }         }|J |j                     }|j                  xx   dz  cc<   |t        |j                        k\  r.|j
                  rJ |j                  s	d|_        | S t        d       |j                  |          }|]t        |j                  j                  d       d u        t               |j                  <   | |j                     |j                     <   |j
                  r+|j                     t        |j                        k(  rt        | S )NrW   Tzotorch.utils.checkpoint: trying to save more tensors during recomputation than during the original forward pass.)r+   r*   r   rY   r   r  r  r  r   r   r   getr   r   rO  )r1   target_frame
recomp_idxr  r
  rR  s       r    	pack_hookz/_recomputation_hook.__init__.<locals>.pack_hook  sA   oo
1A+-L+++%44S9J'',1,S!:!:;;'2222#55 :>L6H%K 
 ;\..z:<F ! !3!3C!>$!FG&-is#DE'',V^^C-@A&&<+F+Fs+Ks))P , .-Hr2   c                     | S r5   rI   rG  s    r    unpack_hookz1_recomputation_hook.__init__.<locals>.unpack_hookC  s	     Hr2   )superr   )r   rR  r
  rX  rZ  	__class__s    ``  r    r   z_recomputation_hook.__init__  s    $	L	
 	K0r2   )r.   rL   rM   r   r   r   __classcell__r\  s   @r    rQ  rQ    s    ,1 ,1S ,1 ,1r2   rQ  c                        e Zd Z fdZ xZS )_checkpoint_hookc                     fd}fdj                   fd}t        | 	  ||       y t        | 	  |       y )Nc                 <   t               }j                  j                  t        j                  |             j
                  It        j                         5  j                  j                  j                  |              d d d        |S |S # 1 sw Y   |S xY wr5   )	r   r   r)   r   refr  r'   r   r  )r1   r  frames     r    rX  z,_checkpoint_hook.__init__.<locals>.pack_hookM  s|    YF%%gkk&&9:  ,]]_ C%%,,U->->q-ABCM6MCMs   +BBc                 `   t         j                  j                         }|dk(  rt        t	        j
                               }j                  |   sj                  j                  }|j                  |j                        }	 t        t        j                        |      5  t         j                  j                         5   j                   |  d d d        d d d        dj                  |<   j%                  |       t'        || j(                  v        | j(                  |   t+        d      t'        | j(                  |   j,                  |   v        j,                  |   | j(                  |      }d | j(                  |<   |S # 1 sw Y   xY w# 1 sw Y   xY w# t"        $ r Y w xY w)Nr   Tztorch.utils.checkpoint: Unpack is being triggered for a tensor that was already unpacked once. If you are calling ctx.saved_tensors in backward, make sure to do so only once. Otherwise please open an issue with details on your use case.)r'   _C_current_graph_task_idr   uuiduuid4r   r   grad_fnr   r   rQ  r   rc  r   r   r   rO  r  r   r   r   r   )r  r
  r   r[   r   rd  s        r    rZ  z._checkpoint_hook.__init__.<locals>.unpack_hookW  s   ((113Cby$**,'&&s+''//||C$5$56,E*C 2~~1132 +**D12 2 ,0##C(44S9SFNN23~~c"*%b 
 V^^C0E4D4DS4IIJ""3's(;<C"&FNN3J)2 2 2 2 / sB   F! "FF	FF! 	F	FFF! !	F-,F-c                 d    	  |       S # t         $ r}j                  |       Y d }~y d }~ww xY wr5   )r   r  )r  r&  rd  rZ  s     r    unpack_hook_with_error_cbz<_checkpoint_hook.__init__.<locals>.unpack_hook_with_error_cby  s4    -&v..& -))!,,-s    	/*/)r  r[  r   )r   rd  rX  rl  rZ  r\  s    `  @r    r   z_checkpoint_hook.__init__L  sB    		B   ,-
 GY(ABGY4r2   )r.   rL   rM   r   r]  r^  s   @r    r`  r`  K  s    45 45r2   r`  c                 b    |D ]*  }t        |t        j                        st        |      s* y y)NTF)r%   r'   r(   r   )funcr[   r   rT   s       r    _is_compilingro    s/      c5<<(VC[ r2   c                       e Zd Zd Zd Zy)_VersionWrapperc                 x    || _         t        |t        j                        r|j                  | _        y d | _        y r5   )valr%   r'   r(   _versionversion)r   rs  s     r    r   z_VersionWrapper.__init__  s(    -06@ell6ScllY]r2   c                     | j                   0|s.| j                  j                  | j                   k7  rt        d      | j                  S )NzETensor cached during selective activation checkpoint has been mutated)ru  rs  rt  r,   )r   allow_cache_entry_mutations     r    get_valz_VersionWrapper.get_val  s@    <<#,Fxx  DLL0"[  xxr2   N)r.   rL   rM   r   rx  rI   r2   r    rq  rq    s    ^r2   rq  c                 R   t        | t        j                        r| j                         s| j	                         s|r]t        j
                  j                  t        j
                  j                  j                  d      5  | j                         } d d d        | S | S # 1 sw Y   | S xY wr   )
r%   r'   r(   is_floating_point
is_complexrf  _SetExcludeDispatchKeyGuardDispatchKeyADInplaceOrViewr*   r1   any_ret_has_alias_infos     r    _maybe_detachr    s|     !U\\"(;(;(=SiXX11%((2F2F2V2VX]^ 	 
A	 H1H	 Hs   ?BB&c                       e Zd ZdZd Zy)r   a  
    Context passed to policy function during selective checkpointing.

    This class is used to pass relevant metadata to the policy function during
    selective checkpointing. The metadata includes whether the current invocation
    of the policy function is during recomputation or not.

    Example:
        >>> # xdoctest: +SKIP(stub)
        >>>
        >>> def policy_fn(ctx, op, *args, **kwargs):
        >>>    print(ctx.is_recompute)
        >>>
        >>> context_fn = functools.partial(create_selective_checkpoint_contexts, policy_fn)
        >>>
        >>> out = torch.utils.checkpoint.checkpoint(
        >>>     fn, x, y,
        >>>     use_reentrant=False,
        >>>     context_fn=context_fn,
        >>> )
    c                    || _         y r5   is_recompute)r   r  s     r    r   z#SelectiveCheckpointContext.__init__  s
    (r2   N)r.   rL   rM   rN   r   rI   r2   r    r   r     s    *)r2   r   c                        e Zd ZdZdZdZdZdZy)r   a  
    Enum for specifying the policy for checkpointing during backpropagation.

    The following policies are supported:

    - ``{MUST,PREFER}_SAVE``: The operation's output will be saved during the forward
      pass and will not be recomputed during the backward pass
    - ``{MUST,PREFER}_RECOMPUTE``: The operation's output will not be saved during the
      forward pass and will be recomputed during the backward pass

    Use ``MUST_*`` over ``PREFER_*`` to indicate that the policy should not be overridden
    by other subsystems like `torch.compile`.

    .. note::
        A policy function that always returns ``PREFER_RECOMPUTE`` is
        equivalent to vanilla checkpointing.

        A policy function that returns ``PREFER_SAVE`` every op is
        NOT equivalent to not using checkpointing. Using such a policy would
        save additional tensors not limited to ones that are actually needed for
        gradient computation.
    r   rW   r      N)r.   rL   rM   rN   	MUST_SAVEPREFER_SAVEMUST_RECOMPUTEPREFER_RECOMPUTErI   r2   r    r   r     s    , IKNr2   r   c                 F    | rt         j                  S t         j                  S r5   r   r  r  )r   s    r    _policy_from_boolr    s    )*%%Q0@0Q0QQr2   c                       e Zd Zd ZddZy)_CachingTorchDispatchModec                      || _         || _        y r5   )	policy_fnstorage)r   r  r  s      r    r   z"_CachingTorchDispatchMode.__init__  s    "r2   Nc                    |t         v r ||i |S |i n|} | j                  t        d      |g|i |}t        |t              rt        |      }t        |||      }|r|t        j                  d<    ||i |}t        d |j                  j                  D              |t        j                  t        j                  fv s|r+| j                  |   j!                  t#        fd|             |S )NFr  r6  c              3   8   K   | ]  }|j                   d u  y wr5   )
alias_info)r7   r   s     r    r8   z?_CachingTorchDispatchMode.__torch_dispatch__.<locals>.<genexpr>  s     $`CS^^4%?$`s   c                 .    t        t        |             S r5   )rq  r  r  s    r    rL  z>_CachingTorchDispatchMode.__torch_dispatch__.<locals>.<lambda>  s    WXZpIq9r r2   )r   r  r   r%   r   r  ro  fx_tracebackcurrent_metar9   _schemareturnsr   r  r  r  r)   r   )	r   rn  typesr[   r   policyis_compilingr/   r  s	           @r    r*  z,_CachingTorchDispatchMode.__torch_dispatch__  s    ?"(((~6 : N $7'+7/57fd#&v.F$T485;L%%k2D#F#!$$`4<<K_K_$`!`&002B2N2NOOS_LL%%h/rtw&xy
r2   rI   Nr.   rL   rM   r   r*  rI   r2   r    r  r    s    r2   r  c                       e Zd Zd ZddZy)_CachedTorchDispatchModec                 .    || _         || _        || _        y r5   )r  r  rw  )r   r  r  rw  s       r    r   z!_CachedTorchDispatchMode.__init__  s    "*D'r2   Nc                     |t         v r ||i |S |i n|}  j                  t        d      |g|i |}t        |t              rt        |      }t        |||      }|t        j                  t        j                  fv s|rd j                  j                  |      }|t        | d      t        |      dk(  rt        d      t         fd|j                  d            }|S  ||i |}|S )NTr  z6 encountered during backward, but not found in storager   zTrying to backward an extra time. You are only allowed to backward once on any region computed under selective activation checkpoint.c                 :    | j                  j                        S r5   )rx  rw  )r1   r   s    r    rL  z=_CachedTorchDispatchMode.__torch_dispatch__.<locals>.<lambda>0  s    QYYt/N/N%O r2   )r   r  r   r%   r   r  ro  r   r  r  r  rU  r,   rY   r   r   )	r   rn  r  r[   r   r  r  r  r/   s	   `        r    r*  z+_CachedTorchDispatchMode.__torch_dispatch__  s   ?"(((~6 : M $7'+7/57fd#&v.F$T48&002B2N2NOOS_ll&&t,G"dV+a#bcc7|q "T  OQXQ\Q\]^Q_`C 
 ''C
r2   r  r  rI   r2   r    r  r    s    E
r2   r  c           	          t         t              rv D ]k  }t        |t        j                  j                        r(t        |t        j                  j
                        rdnd}t        d| dt        |       d|         fd}nt               r }nt        d      t        t              }t        ||      t        |||      fS )a  
    Helper to avoid recomputing certain ops during activation checkpointing.

    Use this with `torch.utils.checkpoint.checkpoint` to control which
    operations are recomputed during the backward pass.

    Args:
        policy_fn_or_list (Callable or List):
          - If a policy function is provided, it should accept a
            :class:`SelectiveCheckpointContext`, the :class:`OpOverload`, args and
            kwargs to the op, and return a :class:`CheckpointPolicy` enum value
            indicating whether the execution of the op should be recomputed or not.
          - If a list of operations is provided, it is equivalent to a policy
            returning `CheckpointPolicy.MUST_SAVE` for the specified
            operations and `CheckpointPolicy.PREFER_RECOMPUTE` for all other
            operations.
        allow_cache_entry_mutation (bool, optional): By default, an error is
            raised if any tensors cached by selective activation checkpoint are
            mutated in order to ensure correctness. If set to `True`, this check
            is disabled.
    Returns:
        A tuple of two context managers.

    Example:
        >>> # xdoctest: +REQUIRES(LINUX)
        >>> import functools
        >>>
        >>> x = torch.rand(10, 10, requires_grad=True)
        >>> y = torch.rand(10, 10, requires_grad=True)
        >>>
        >>> ops_to_save = [
        >>>    torch.ops.aten.mm.default,
        >>> ]
        >>>
        >>> def policy_fn(ctx, op, *args, **kwargs):
        >>>    if op in ops_to_save:
        >>>        return CheckpointPolicy.MUST_SAVE
        >>>    else:
        >>>        return CheckpointPolicy.PREFER_RECOMPUTE
        >>>
        >>> context_fn = functools.partial(create_selective_checkpoint_contexts, policy_fn)
        >>>
        >>> # or equivalently
        >>> context_fn = functools.partial(create_selective_checkpoint_contexts, ops_to_save)
        >>>
        >>> def fn(x, y):
        >>>     return torch.sigmoid(torch.matmul(torch.matmul(x, y), y)) * y
        >>>
        >>> out = torch.utils.checkpoint.checkpoint(
        >>>     fn, x, y,
        >>>     use_reentrant=False,
        >>>     context_fn=context_fn,
        >>> )
    zPlease update the OpOverloadPacket to a specific OpOverload.For example, if you have `torch.ops.aten.mm`, change it to `torch.ops.aten.mm.default`.r  z6Expected op in `op_list` to be an OpOverload but got: z	 of type z. c                 L    |v rt         j                  S t         j                  S r5   r  )r   opr[   r   policy_fn_or_lists       r    r  z7create_selective_checkpoint_contexts.<locals>.policy_fn{  s$    &&'111'888r2   z=policy_fn_or_list must be either a function or a list of ops.)r%   r   r'   _ops
OpOverloadOpOverloadPacketr   r-   callable	TypeErrorr   r  r  )r  rw  r  
_extra_msgr  r  s   `     r    r   r   6  s    r #T*# 		Bb%**"7"78  EJJ$?$?@nFH  !LRD Q#Bxj:,8 			9
 
#	$%	WXX$/$5G!)W5 G5OP r2   c           
   /      K   d}t         t         r$n|r!|t        k7  rt        d      t               \  }}|t        v r
t        |   }n,t        dt        t        j                                d|       t        | t              }	 |       \  }
t         ||      r0|t        k7  r't        |
t              rt        t              sJ d       t              \  r0t        j                         dt        |	dd      rd	t!        | \   f
d
}t#        |t$        ||      }t        j&                  dd	      }t)        j*                  ||g| |_        |j,                  j.                  d yt1        |      5  |
5  d ddd       ddd       d	|_        t        |	dd      rrst5        d      y# 1 sw Y   5xY w# 1 sw Y   9xY ww)aZ  Checkpointing without reentrant autograd.

    Args:
        function: describes what to run in the forward pass of the model or
            part of the model. It should also know how to handle the inputs
            passed as the tuple. For example, in LSTM, if user passes
            ``(activation, hidden)``, :attr:`function` should correctly use the
            first input as ``activation`` and the second input as ``hidden``
        preserve_rng_state(bool, optional):  Omit stashing and restoring
            the RNG state during each checkpoint.
            Default: ``True``
        context_fn(Callable, optional): A callable returning a tuple of two
            context managers. The function and its recomputation will be run
            under the first and second context managers respectively.
        determinism_check(str, optional): A string specifying the determinism
            check to perform. By default it is set to ``"default"`` which
            compares the shapes, dtypes, and devices of the recomputed tensors
            against those the saved tensors. To turn off this check, specify
            ``"none"``. Currently these are the only two supported values.
            Please open an issue if you would like to see more determinism
            checks.
        debug(bool, optional): If ``True``, error messages will also include
            a trace of the operators ran during the original forward computation
            as well as the recomputation.
        *args: Arguments to pass in to the given ``function``.
        **kwargs: Keyword arguments to pass into the given ``function``.
    Nz6debug=True is incompatible with non-default context_fnz#determinism_check should be one of z
, but got z}In torch.compile mode, `context_fn` arg passed to `torch.utils.checkpoint` must generate a tuple of two `TorchDispatchMode`s.rf   Fr{   Tc            	      V  
 | ^}}g }rr}t         j                  j                  |      5  r%t        j                  	       rt	        
       t         j
                  j                        r!t        j
                  j                  ddint        j                         }|5  t        j
                  j                  di 5  5   |i | d d d        d d d        d d d        d d d        y # 1 sw Y   "xY w# 1 sw Y   &xY w# 1 sw Y   *xY w# 1 sw Y   y xY w)Nr   rf   rg   rI   r   )
r'   r   r   rj   r   rq   rr   r   r   r   )r"   r   r[   r   r   rw   rv   rg   fnr~   rd   r   r   r}   recompute_contexts        r    r   z=_checkpoint_without_reentrant_generator.<locals>.recompute_fn  s-    "3%K\\""); # 
 	$ "##M2$%k3DR]^ 00= #())"4"4 #'#+A#CMCYCYC[   % $eii&8&8&VBU&V $Xi $D#F#$ $ $	$ 	$$ $ $ $ $ $	$ 	$sT   A>D. DD	C;	D"D*D;D DDDD	DD(r   )r+   zPyTorch's device state was initialized in the forward pass of a Checkpoint, which is not allowed. Please open an issue if you need this feature.)r   r   r   rC  rM  r   keysr]   rA   ro  r%   r
   rx   r'   rc   r?   r   r   r   r   r   r   r   rj  r`  r  r,   )r  r}   r   r   r   r[   r   r  r  r@   forward_contextr   	new_framedummyrw   rv   rg   r~   rd   r   r   r  s   ``            @@@@@@@@r    r   r     s    H O$=$I u(H  '@&A#
O>>89JK1$7Y7^7^7`2a1b c()+
 	

 %d+K&{3M)3&O&Rv&:+H(9:(*;<	AA		A= 3GS^2_//++- "=.%8 $->-E*K*$ $* !%	I KKD1E+11%G$GI $$,	)	$ o  "&I}ne4"3 (
 	
    s6   FG%GGG!,G%G	GG"G%rK   )r#   Nr5   )F)cr   r@  rh  r:   r   collectionsr   typingenumr   r'   torch.fx.tracebackfx	tracebackr  /torch._functorch._aot_autograd.functional_utilsr   torch.utils._pytreer   &torch.testing._internal.logging_tensorr   r	   torch.utils._python_dispatchr
   __all___DEFAULT_DETERMINISM_MODEr   Optionalr   __annotations__r#  r   r   r   r(   r   Iterabler   rA   r   r]   Listr   r   r   rx   r   Functionr   r   _disable_dynamoCallableContextManagerrP   r   r   r   r   r   r   r   r   r   r;  r,   r   rC  DictrH  rM  	ExceptionrO  graphsaved_tensors_hooksrQ  r`  ro  rq  r  r   Enumr   r  opsatenr*   r   primr>   rX   _subclassesfunctional_tensorFunctionalTensormetadata_fnsr   r  r  r   r   rI   r2   r    <module>r     s        #   !  ) ) B ( R :& & ,0 8D> 0 )(4. ) )*
E#s(O 
ellC6G0H 
&
HSM 
d 
6 6BD-d3iell1C&C D -( 7; /(7&f$00 f$R>  %)FU6] D>] U>>#ABBC	]
 ] ] ]@YGx
V !%  -d - -<	 	8 8
"Ienn-- "IJ\ \~: x	l 	9'5"c'):HoEVX\E\<])]#^ 9'vu|| S#X  6
NP "Dh~s7J.K)K$L 	i 	-1%....BB -1`55u~~++?? 55p  &) )4tyy :R 
IINN!! 
IINN!! ++<<IIJK 1 <0 DSt FU6 U>>#ABBC 	
 r2   