
# torch/distributed/__init__.py
import logging
import pdb
import sys
import traceback
import typing

import torch

log = logging.getLogger(__name__)


def is_available() -> bool:
    """
    Return ``True`` if the distributed package is available.

    Otherwise,
    ``torch.distributed`` does not expose any other APIs. Currently,
    ``torch.distributed`` is available on Linux, MacOS and Windows. Set
    ``USE_DISTRIBUTED=1`` to enable it when building PyTorch from source.
    Currently, the default value is ``USE_DISTRIBUTED=1`` for Linux and Windows,
    ``USE_DISTRIBUTED=0`` for MacOS.
    """
    return hasattr(torch._C, "_c10d_init")


if is_available() and not torch._C._c10d_init():
    raise RuntimeError("Failed to initialize torch.distributed")

# Custom runtime errors thrown from the distributed package, re-exported under
# their public names.
DistError = torch._C._DistError
DistBackendError = torch._C._DistBackendError
DistNetworkError = torch._C._DistNetworkError
DistStoreError = torch._C._DistStoreError
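
# Illustrative sketch of how callers typically consume these exception types
# (a documentation-only assumption, not code used by this module):
# ``DistBackendError`` surfaces backend failures (e.g. NCCL/Gloo) raised by
# collectives, so a training loop can catch it, log, and re-raise.
#
#     import logging
#     import torch.distributed as dist
#
#     try:
#         dist.all_reduce(tensor)  # ``tensor`` is assumed to exist on this rank
#     except dist.DistBackendError as err:
#         logging.getLogger(__name__).error("collective failed: %s", err)
#         raise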


if is_available():
    from torch._C._distributed_c10d import (
        _broadcast_coalesced,
        _compute_bucket_assignment_by_size,
        _ControlCollectives,
        _DEFAULT_FIRST_BUCKET_BYTES,
        _make_nccl_premul_sum,
        _register_builtin_comm_hook,
        _register_comm_hook,
        _StoreCollectives,
        _test_python_store,
        _verify_params_across_processes,
        Backend as _Backend,
        BuiltinCommHookType,
        DebugLevel,
        FileStore,
        get_debug_level,
        GradBucket,
        Logger,
        PrefixStore,
        ProcessGroup,
        Reducer,
        set_debug_level,
        set_debug_level_from_env,
        Store,
        TCPStore,
        Work as _Work,
    )

    class _DistributedPdb(pdb.Pdb):
        """
        Supports using PDB from inside a multiprocessing child process.

        Usage:
        _DistributedPdb().set_trace()
        """

        def interaction(self, *args, **kwargs):
            _stdin = sys.stdin
            try:
                # A multiprocessing child may inherit a redirected stdin, so
                # reopen the controlling terminal for interactive debugging.
                sys.stdin = open("/dev/stdin")
                pdb.Pdb.interaction(self, *args, **kwargs)
            finally:
                sys.stdin = _stdin

    _breakpoint_cache: typing.Dict[int, typing.Any] = {}

    def breakpoint(rank: int = 0, skip: int = 0):
        """
        Set a breakpoint, but only on a single rank.  All other ranks will wait for you to be
        done with the breakpoint before continuing.

        Args:
            rank (int): Which rank to break on.  Default: ``0``
            skip (int): Skip the first ``skip`` calls to this breakpoint. Default: ``0``.
        """
        if skip > 0:
            key = hash(str(traceback.format_exc()))
            counter = _breakpoint_cache.get(key, 0) + 1
            _breakpoint_cache[key] = counter
            if counter <= skip:
                log.warning("Skip the breakpoint, counter=%d", counter)
                return

        if get_rank() == rank:
            pdb = _DistributedPdb()
            pdb.message(
                "\n!!! ATTENTION !!!\n\n"
                f"Type 'up' to get to the frame that called dist.breakpoint(rank={rank})\n"
            )
            pdb.set_trace()

        # Ignore any Meta/Python dispatch keys in TLS so the barrier below hits
        # the default CPU/CUDA implementation.
        meta_in_tls = torch._C._meta_in_tls_dispatch_include()
        guard = torch._C._DisableTorchDispatch()  # type: ignore[attr-defined]
        torch._C._set_meta_in_tls_dispatch_include(False)
        try:
            barrier()
        finally:
            torch._C._set_meta_in_tls_dispatch_include(meta_in_tls)
            del guard
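
    # Illustrative sketch of calling ``torch.distributed.breakpoint`` from a
    # training script (kept as a comment so nothing executes at import time; the
    # surrounding loop, ``loader`` and ``model`` are assumptions for the
    # example). Every rank must reach the call: rank 0 drops into pdb while the
    # other ranks block in the barrier until the debugger session ends.
    #
    #     import torch.distributed as dist
    #
    #     for step, batch in enumerate(loader):
    #         if step == 10:
    #             dist.breakpoint(rank=0)   # debug step 10 on rank 0 only
    #         loss = model(batch).sum()
    #         loss.backward()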
D/ /"Ewin32)	HashStorer;   )
DeviceMeshinit_device_mesh)*)_all_gather_base_coalescing_manager_CoalescingManager_create_process_group_wrapper_get_process_group_name_rank_not_in_group_reduce_scatter_baseget_node_local_rank)_remote_device)_create_store_from_optionsregister_rendezvous_handler
rendezvousc                       e Zd Zy)_ProcessGroupStubN)r3   r4   r5   r	   r
   r   ra   ra      s    r
   ra   ztorch.distributed)r   r   )Ologgingr,   r)   r>   typingr   	getLoggerr3   rA   boolr   r   r   RuntimeError
_DistError	DistError_DistBackendErrorDistBackendError_DistNetworkErrorDistNetworkError_DistStoreErrorDistStoreErrortorch._C._distributed_c10dr   r   r   r   r   r   r   r   r   r   r   _Backendr   r   r   r   r   r   r   r   r    r!   r"   r#   r$   r%   _Workr-   r'   r7   DictintAny__annotations__rN   platformrP   device_meshrQ   rR   distributed_c10drT   rU   rV   rW   rX   rY   rZ   r[   remote_devicer\   r_   r]   r^   ra   modulesr	   r
   r   <module>r{      sa    
 
    g!+d + >%((--/
?
@@ HH	88-- 88-- ))>      8##'' #  79v{{3

?38! ! !F ||w89
 $	 	 	 .    5FCKK#$1r
   