from __future__ import annotations

from typing import Any, TYPE_CHECKING

import torch


if TYPE_CHECKING:
    from types import TracebackType


def is_available() -> bool:
    return hasattr(torch._C, "_dist_autograd_init")


if is_available() and not torch._C._dist_autograd_init():
    raise RuntimeError("Failed to initialize torch.distributed.autograd")

if is_available():
    from torch._C._distributed_autograd import (
        _current_context,
        _get_debug_info,
        _get_max_id,
        _init,
        _is_valid_context,
        _new_context,
        _release_context,
        _retrieve_context,
        backward,
        DistAutogradContext,
        get_gradients,
    )


__all__ = ["context", "is_available"]


class context:
    """
    Context object to wrap forward and backward passes when using
    distributed autograd. The ``context_id`` generated in the ``with``
    statement is required to uniquely identify a distributed backward pass
    on all workers. Each worker stores metadata associated with this
    ``context_id``, which is required to correctly execute a distributed
    autograd pass.

    Example::
        >>> # xdoctest: +SKIP
        >>> import torch.distributed.autograd as dist_autograd
        >>> import torch.distributed.rpc as rpc
        >>> with dist_autograd.context() as context_id:
        >>>     t1 = torch.rand((3, 3), requires_grad=True)
        >>>     t2 = torch.rand((3, 3), requires_grad=True)
        >>>     loss = rpc.rpc_sync("worker1", torch.add, args=(t1, t2)).sum()
        >>>     dist_autograd.backward(context_id, [loss])
    """

    def __enter__(self) -> int:
        self.autograd_context = _new_context()
        return self.autograd_context._context_id()

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        _release_context(self.autograd_context._context_id())