
import copyreg
import os.path as _osp
import weakref

import torch
from torch.utils import (
    backcompat,
    collect_env,
    data,
    deterministic,
    hooks,
)
from torch.utils.backend_registration import (
    generate_methods_for_privateuse1_backend,
    rename_privateuse1_backend,
)
from torch.utils.cpp_backtrace import get_cpp_backtrace
from torch.utils.throughput_benchmark import ThroughputBenchmark


def set_module(obj, mod):
    """
    Set the module attribute on a python object for a given object for nicer printing
    """
    if not isinstance(mod, str):
        raise TypeError("The mod argument should be a string")
    obj.__module__ = mod
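

# Illustrative usage sketch (added commentary, not part of the original source):
# set_module only rewrites __module__, which is what repr() and documentation
# tools consult when printing an object's qualified name. The names `_helper`
# and "my_package.public" below are hypothetical.
#
#   def _helper():
#       ...
#   set_module(_helper, "my_package.public")
#   assert _helper.__module__ == "my_package.public"
#   set_module(_helper, 42)  # raises TypeError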


if torch._running_with_deploy():
    # not valid inside torch_deploy interpreter, no paths exist for frozen modules
    cmake_prefix_path = None
else:
    cmake_prefix_path = _osp.join(
        _osp.dirname(_osp.dirname(__file__)), "share", "cmake"
    )
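
# Illustrative note (added commentary, not from the original file): the usual
# way to consume cmake_prefix_path is to hand it to CMake so that
# `find_package(Torch)` can locate the bundled TorchConfig.cmake, e.g.
#
#   cmake -DCMAKE_PREFIX_PATH="$(python -c 'import torch; print(torch.utils.cmake_prefix_path)')" ..
#
# The surrounding build setup is an assumption; adapt it to your project.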
       |D ]  }t         |      rt        |      r	 ||       $t         |      r$t        |t         |             t         |       Tt        |      sat         |t        |             t        |        t        j                  j                          y)z
    This function swaps the content of the two Tensor objects.
    At a high level, this will make t1 have the content of t2 while preserving
    its identity.

    This will not work if t1 and t2 have different slots.
    z8Cannot swap t1 because it has weakref associated with itz8Cannot swap t2 because it has weakref associated with itz2Cannot swap t1 and t2 if they have different slotsc                 f    t        |       }t        | t        |              t        | |       y )N)getattrsetattr)nametmpt1t2s     r   	swap_attrzswap_tensors.<locals>.swap_attr;   s.    b$D72t,.D#r   c                     t        d      )Na  Trying to execute AccumulateGrad node that was poisoned by swap_tensors this can happen when you try to run backward on a tensor that was swapped. For a module m with `torch.__future__.set_swap_module_params_on_conversion(True)` you should not change the device or dtype of the module (e.g. `m.cpu()` or `m.half()`) between running forward and backward. To resolve this, please only change the device/dtype before running forward (or after both forward and backward).)RuntimeError)grad_outputss    r   error_pre_hookz$swap_tensors.<locals>.error_pre_hook@   s    X
 	
r   r   c                 @   | j                         }d| d| d}|dkD  r|dk(  ro| j                  rct        j                  j                  j                  |       j                  }| j                         dk(  r|j                         y t        |      t        |      y )NzExpected use_count of z2 to be 1 or 2 with an AccumulateGrad node but got zH make sure you are not holding references to the tensor in other places.      )	
_use_countis_leaftorchautogradgraphget_gradient_edgenoderegister_prehookr"   )tr   	use_count	error_straccum_grad_noder$   s        r   check_use_countz%swap_tensors.<locals>.check_use_countJ   s    LLN	$TF*\]f\g hV W 	 q=A~!))"'.."6"6"H"H"K"P"P<<>Q&#44^D&y11"9-- r   r   	__class____dict__N)r   )weakrefgetweakrefsr"   setcopyreg
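
    # Added commentary: a use_count of 2 is only tolerated for leaf tensors,
    # where the extra reference is expected to be the AccumulateGrad node in
    # the autograd graph. A pre-hook is registered on that node so a later
    # backward() through the swapped tensor fails with a clear error instead
    # of silently operating on stale state.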
    check_use_count(t1, "t1")
    check_use_count(t2, "t2")

    # Swap the types
    # Note that this will fail if there are mismatched slots
    swap_attr("__class__")

    # Swap the dynamic attributes
    swap_attr("__dict__")

    # Swap the slots
    for slot in t1_slots:
        if hasattr(t1, slot) and hasattr(t2, slot):
            swap_attr(slot)
        elif hasattr(t1, slot):
            setattr(t2, slot, getattr(t1, slot))
            delattr(t1, slot)
        elif hasattr(t2, slot):
            setattr(t1, slot, getattr(t2, slot))
            delattr(t2, slot)

    # Swap the at::Tensor they point to
    torch._C._swap_tensor_impl(t1, t2)