r"""Weight Normalization from https://arxiv.org/abs/1602.07868."""
from typing import Any, TypeVar

from typing_extensions import deprecated

from torch import _weight_norm, norm_except_dim
from torch.nn.modules import Module
from torch.nn.parameter import Parameter, UninitializedParameter


__all__ = ["WeightNorm", "weight_norm", "remove_weight_norm"]


class WeightNorm:
    name: str
    dim: int

    def __init__(self, name: str, dim: int) -> None:
        if dim is None:
            dim = -1
        self.name = name
        self.dim = dim

    def compute_weight(self, module: Module) -> Any:
        # Recompute the weight tensor from its magnitude ("_g") and
        # direction ("_v") parameters.
        g = getattr(module, self.name + "_g")
        v = getattr(module, self.name + "_v")
        return _weight_norm(v, g, self.dim)

    @staticmethod
    @deprecated(
        "`torch.nn.utils.weight_norm` is deprecated "
        "in favor of `torch.nn.utils.parametrizations.weight_norm`.",
        category=FutureWarning,
    )
    def apply(module, name: str, dim: int) -> "WeightNorm":
        for hook in module._forward_pre_hooks.values():
            if isinstance(hook, WeightNorm) and hook.name == name:
                raise RuntimeError(
                    f"Cannot register two weight_norm hooks on the same parameter {name}"
                )

        if dim is None:
            dim = -1

        fn = WeightNorm(name, dim)

        weight = getattr(module, name)
        if isinstance(weight, UninitializedParameter):
            raise ValueError(
                "The module passed to `WeightNorm` can't have uninitialized parameters. "
                "Make sure to run the dummy forward before applying weight normalization"
            )
        # Remove the original parameter from the parameter list.
        del module._parameters[name]

        # Add g and v as new parameters and express the weight as g * v / ||v||.
        module.register_parameter(
            name + "_g", Parameter(norm_except_dim(weight, 2, dim).data)
        )
        module.register_parameter(name + "_v", Parameter(weight.data))
        setattr(module, name, fn.compute_weight(module))

        # Recompute the weight before every forward().
        module.register_forward_pre_hook(fn)

        return fn

    def remove(self, module: Module) -> None:
        weight = self.compute_weight(module)
        delattr(module, self.name)
        del module._parameters[self.name + "_g"]
        del module._parameters[self.name + "_v"]
        setattr(module, self.name, Parameter(weight.data))

    def __call__(self, module: Module, inputs: Any) -> None:
        # Called as a forward pre-hook: refresh the weight from g and v.
        setattr(module, self.name, self.compute_weight(module))


T_module = TypeVar("T_module", bound=Module)


def weight_norm(module: T_module, name: str = "weight", dim: int = 0) -> T_module:
    r"""Apply weight normalization to a parameter in the given module.

    .. math::
         \mathbf{w} = g \dfrac{\mathbf{v}}{\|\mathbf{v}\|}

    Weight normalization is a reparameterization that decouples the magnitude
    of a weight tensor from its direction. This replaces the parameter specified
    by :attr:`name` (e.g. ``'weight'``) with two parameters: one specifying the magnitude
    (e.g. ``'weight_g'``) and one specifying the direction (e.g. ``'weight_v'``).
    Weight normalization is implemented via a hook that recomputes the weight
    tensor from the magnitude and direction before every :meth:`~Module.forward`
    call.
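
    Concretely, the recomputed weight is ``weight_g * weight_v / ||weight_v||``
    taken along the norm dimension.  An illustrative check of that identity
    (assuming the default ``dim=0`` and the ``nn.Linear`` module used in the
    example below)::

        >>> m = weight_norm(nn.Linear(20, 40))
        >>> w = m.weight_g * m.weight_v / m.weight_v.norm(2, dim=1, keepdim=True)
        >>> torch.allclose(m.weight, w)
        True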

    By default, with ``dim=0``, the norm is computed independently per output
    channel/plane. To compute a norm over the entire weight tensor, use
    ``dim=None``.
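
    For instance (an illustrative sketch with the same ``nn.Linear`` as in the
    example below), ``dim=None`` keeps a single scalar magnitude for the whole
    tensor::

        >>> m = weight_norm(nn.Linear(20, 40), dim=None)
        >>> m.weight_g.numel()
        1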

    See https://arxiv.org/abs/1602.07868

    .. warning::

        This function is deprecated.  Use :func:`torch.nn.utils.parametrizations.weight_norm`
        which uses the modern parametrization API.  The new ``weight_norm`` is compatible
        with ``state_dict`` generated from old ``weight_norm``.

        Migration guide (a short sketch follows this list):

        * The magnitude (``weight_g``) and direction (``weight_v``) are now expressed
          as ``parametrizations.weight.original0`` and ``parametrizations.weight.original1``
          respectively.  If this is bothering you, please comment on
          https://github.com/pytorch/pytorch/issues/102999

        * To remove the weight normalization reparametrization, use
          :func:`torch.nn.utils.parametrize.remove_parametrizations`.

        * The weight is no longer recomputed once at module forward; instead, it will
          be recomputed on every access.  To restore the old behavior, use
          :func:`torch.nn.utils.parametrize.cached` before invoking the module
          in question.
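
        Putting these points together, a rough migration sketch (same
        ``nn.Linear`` as in the example below; the names follow the bullets
        above, not this function)::

            >>> from torch.nn.utils import parametrize
            >>> from torch.nn.utils.parametrizations import weight_norm as wn
            >>> m = wn(nn.Linear(20, 40), name='weight')
            >>> g = m.parametrizations.weight.original0  # magnitude (was ``weight_g``)
            >>> v = m.parametrizations.weight.original1  # direction (was ``weight_v``)
            >>> with parametrize.cached():  # recompute the weight only once here
            ...     out = m(torch.randn(3, 20))
            >>> m = parametrize.remove_parametrizations(m, 'weight')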

    Args:
        module (Module): containing module
        name (str, optional): name of weight parameter
        dim (int, optional): dimension over which to compute the norm

    Returns:
        The original module with the weight norm hook

    Example::

        >>> m = weight_norm(nn.Linear(20, 40), name='weight')
        >>> m
        Linear(in_features=20, out_features=40, bias=True)
        >>> m.weight_g.size()
        torch.Size([40, 1])
        >>> m.weight_v.size()
        torch.Size([40, 20])

    """
    WeightNorm.apply(module, name, dim)
    return module


def remove_weight_norm(module: T_module, name: str = "weight") -> T_module:
    r"""Remove the weight normalization reparameterization from a module.

    Args:
        module (Module): containing module
        name (str, optional): name of weight parameter

    Example:
        >>> m = weight_norm(nn.Linear(20, 40))
        >>> remove_weight_norm(m)
    """
    for k, hook in module._forward_pre_hooks.items():
        if isinstance(hook, WeightNorm) and hook.name == name:
            hook.remove(module)
            del module._forward_pre_hooks[k]
            return module

    raise ValueError(f"weight_norm of '{name}' not found in {module}")