
from warnings import warn

import torch

__all__ = [
    "ReLU6",
    "Hardswish",
    "ELU",
    "LeakyReLU",
    "Sigmoid",
    "Softmax",
    "MultiheadAttention",
    "PReLU",
]


class ReLU6(torch.nn.ReLU):
    r"""Applies the element-wise function:

    :math:`\text{ReLU6}(x) = \min(\max(x_0, x), q(6))`, where :math:`x_0` is the
    zero_point, and :math:`q(6)` is the quantized representation of number 6.

    Args:
        inplace: can optionally do the operation in-place. Default: ``False``

    Shape:
        - Input: :math:`(N, *)` where `*` means, any number of additional
          dimensions
        - Output: :math:`(N, *)`, same shape as the input

    .. image:: ../scripts/activation_images/ReLU6.png

    Examples::

        >>> m = nn.quantized.ReLU6()
        >>> input = torch.randn(2)
        >>> # xdoctest: +SKIP
        >>> input = torch.quantize_per_tensor(input, 1.0, 0, dtype=torch.qint32)
        >>> output = m(input)
    """

    def __init__(self, inplace=False):
        super().__init__(inplace)
        self.inplace = inplace

    def forward(self, input):
        return torch.ops.quantized.relu6(input, self.inplace)

    def _get_name(self):
        return "QuantizedReLU6"

    @staticmethod
    def from_float(mod, use_precomputed_fake_quant=False):
        return ReLU6(mod.inplace)


class Hardswish(torch.nn.Hardswish):
    r"""This is the quantized version of :class:`~torch.nn.Hardswish`.

    Args:
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
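
    Examples (illustrative sketch; assumes a quantized engine is available)::

        >>> # xdoctest: +SKIP
        >>> m = nn.quantized.Hardswish(scale=1.0, zero_point=0)
        >>> input = torch.quantize_per_tensor(torch.randn(2), 1.0, 0, dtype=torch.quint8)
        >>> output = m(input)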
    """

    def __init__(self, scale, zero_point, device=None, dtype=None):
        factory_kwargs = {"device": device, "dtype": dtype}
        super().__init__()
        self.register_buffer("scale", torch.tensor(scale, **factory_kwargs))
        self.register_buffer("zero_point", torch.tensor(zero_point, **factory_kwargs))

    def forward(self, input):
        return torch.ops.quantized.hardswish(input, self.scale, self.zero_point)

    def _get_name(self):
        return "QuantizedHardswish"

    @staticmethod
    def from_float(mod, use_precomputed_fake_quant=False):
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        return Hardswish(float(scale), int(zero_point))

    @classmethod
    def from_reference(cls, mod, scale, zero_point):
        return cls(float(scale), int(zero_point))


class ELU(torch.nn.ELU):
    r"""This is the quantized equivalent of :class:`~torch.nn.ELU`.

    Args:
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
        alpha: the alpha constant
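
    Examples (illustrative sketch; assumes a quantized engine is available)::

        >>> # xdoctest: +SKIP
        >>> m = nn.quantized.ELU(scale=1.0, zero_point=0, alpha=1.0)
        >>> input = torch.quantize_per_tensor(torch.randn(2), 1.0, 0, dtype=torch.quint8)
        >>> output = m(input)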
    """

    def __init__(self, scale, zero_point, alpha=1.0):
        super().__init__(alpha)
        self.scale = scale
        self.zero_point = zero_point

    def forward(self, input):
        return torch.ao.nn.quantized.functional.elu(
            input, self.scale, self.zero_point, self.alpha
        )

    def _get_name(self):
        return "QuantizedELU"

    @staticmethod
    def from_float(mod, use_precomputed_fake_quant=False):
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        return ELU(float(scale), int(zero_point), mod.alpha)

    @classmethod
    def from_reference(cls, mod, scale, zero_point):
        return cls(float(scale), int(zero_point), mod.alpha)


class LeakyReLU(torch.nn.LeakyReLU):
    r"""This is the quantized equivalent of :class:`~torch.nn.LeakyReLU`.

    Args:
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
        negative_slope: Controls the angle of the negative slope. Default: 1e-2
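
    Examples (illustrative sketch; assumes a quantized engine is available)::

        >>> # xdoctest: +SKIP
        >>> m = nn.quantized.LeakyReLU(scale=1.0, zero_point=0, negative_slope=0.01)
        >>> input = torch.quantize_per_tensor(torch.randn(2), 1.0, 0, dtype=torch.quint8)
        >>> output = m(input)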
    """

    def __init__(
        self,
        scale: float,
        zero_point: int,
        negative_slope: float = 1e-2,
        inplace: bool = False,
        device=None,
        dtype=None,
    ) -> None:
        factory_kwargs = {"device": device, "dtype": dtype}
        super().__init__(negative_slope, inplace)
        self.register_buffer("scale", torch.tensor(scale, **factory_kwargs))
        self.register_buffer("zero_point", torch.tensor(zero_point, **factory_kwargs))

    def forward(self, input):
        return torch.ops.quantized.leaky_relu(
            input, self.negative_slope, self.inplace, self.scale, self.zero_point
        )

    def _get_name(self):
        return "QuantizedLeakyReLU"

    @classmethod
    def from_float(cls, mod, use_precomputed_fake_quant=False):
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        return cls(float(scale), int(zero_point), mod.negative_slope, mod.inplace)

    @classmethod
    def from_reference(cls, mod, scale, zero_point):
        return cls(float(scale), int(zero_point), mod.negative_slope, mod.inplace)


class Sigmoid(torch.nn.Sigmoid):
    r"""This is the quantized equivalent of :class:`~torch.nn.Sigmoid`.

    Args:
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
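
    Examples (illustrative sketch; assumes a quantized engine is available)::

        >>> # xdoctest: +SKIP
        >>> m = nn.quantized.Sigmoid(output_scale=1.0 / 256.0, output_zero_point=0)
        >>> input = torch.quantize_per_tensor(torch.randn(2), 1.0, 0, dtype=torch.quint8)
        >>> output = m(input)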
    """

    def __init__(self, output_scale: float, output_zero_point: int):
        super().__init__()
        self.output_scale = output_scale
        self.output_zero_point = output_zero_point

    def forward(self, input):
        return torch.ops.quantized.sigmoid(
            input, self.output_scale, self.output_zero_point
        )

    @classmethod
    def from_float(cls, mod, use_precomputed_fake_quant=False):
        output_scale, output_zero_point = (
            mod.activation_post_process.calculate_qparams()
        )
        return cls(float(output_scale), int(output_zero_point))


class Softmax(torch.nn.Softmax):
    r"""This is the quantized version of :class:`~torch.nn.Softmax`.

    Args:
        dim: A dimension along which Softmax will be computed (so every slice along dim will sum to 1).
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
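
    Examples (illustrative sketch; assumes a quantized engine is available)::

        >>> # xdoctest: +SKIP
        >>> m = nn.quantized.Softmax(dim=1, scale=1.0 / 256.0, zero_point=0)
        >>> input = torch.quantize_per_tensor(torch.randn(2, 3), 1.0, 0, dtype=torch.quint8)
        >>> output = m(input)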
    """

    def __init__(self, dim=None, scale=1.0, zero_point=0):
        super().__init__()
        self.dim = dim
        self.scale = scale
        self.zero_point = zero_point

    def forward(self, input):
        dim = self.dim
        if dim is None:
            stacklevel = 3
            # Infer the softmax dimension the same way the float module does.
            dim = torch.nn.functional._get_softmax_dim(
                "softmax", input.dim(), stacklevel
            )
        return torch.ops.quantized.softmax(input, dim, self.scale, self.zero_point)

    def _get_name(self):
        return "QuantizedSoftmax"

    @staticmethod
    def from_float(mod, use_precomputed_fake_quant=False):
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        return Softmax(mod.dim, float(scale), int(zero_point))

    @classmethod
    def from_reference(cls, mod, scale, zero_point):
        return cls(mod.dim, float(scale), int(zero_point))


class MultiheadAttention(torch.ao.nn.quantizable.MultiheadAttention):
    _FLOAT_MODULE = torch.ao.nn.quantizable.MultiheadAttention

    def _get_name(self):
        return "QuantizedMultiheadAttention"

    @classmethod
    def from_float(cls, other):
        # The conversion flow is float -> observed -> quantized; this class only
        # handles the observed -> quantized step, so a plain float module is rejected.
        raise NotImplementedError(
            "It looks like you are trying to convert a non-observed MHA module. "
            "Please, see the examples on quantizable MHAs."
        )

    @classmethod
    def from_observed(cls, other):
        converted = torch.ao.quantization.convert(
            other,
            mapping=None,
            inplace=False,
            remove_qconfig=True,
            convert_custom_config_dict=None,
        )
        converted.__class__ = cls
        # Remove bias_k and bias_v from the parameter dict and store them as
        # quantized tensors instead.
        if converted.bias_k is not None:
            bias_k = converted._parameters.pop("bias_k")
            sc, zp = torch._choose_qparams_per_tensor(bias_k, reduce_range=False)
            bias_k = torch.quantize_per_tensor(bias_k, sc, zp, torch.quint8)
            setattr(converted, "bias_k", bias_k)

        if converted.bias_v is not None:
            bias_v = converted._parameters.pop("bias_v")
            sc, zp = torch._choose_qparams_per_tensor(bias_k, reduce_range=False)
            bias_v = torch.quantize_per_tensor(bias_v, sc, zp, torch.quint8)
            setattr(converted, "bias_v", bias_v)

        del converted.in_proj_weight
        del converted.in_proj_bias

        return converted


class PReLU(torch.nn.Module):
    r"""This is the quantized equivalent of :class:`~torch.nn.PReLU`.

    Args:
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
        num_parameters: number of parameters: 1, or the number of channels at input. Default: 1
    """

    def __init__(
        self, output_scale: float, output_zero_point: int, num_parameters: int = 1
    ) -> None:
        super().__init__()
        self.num_parameters = num_parameters
        self.scale = output_scale
        self.zero_point = output_zero_point
        w = torch.randn(num_parameters, dtype=torch.float)
        qw = torch.quantize_per_tensor(w, scale=1.0, zero_point=0, dtype=torch.quint8)
        self.set_weight(qw)

    def set_weight(self, w: torch.Tensor) -> None:
        self.weight = w

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        return torch.ops.quantized.prelu(
            input, self.weight, self.scale, self.zero_point
        )

    def _get_name(self):
        return "QuantizedPReLU"

    @classmethod
    def from_float(cls, mod, use_precomputed_fake_quant=False):
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        qprelu = cls(float(scale), int(zero_point), mod.num_parameters)
        float_wt = mod.weight.float()
        observer = mod.qconfig.weight()
        observer(float_wt)
        if observer.dtype != torch.quint8:
            warn(
                f"PReLU's weight observer should have dtype quint8 but got {observer.dtype}"
            )
        wt_scale, wt_zp = observer.calculate_qparams()
        qweight = torch.quantize_per_tensor(
            float_wt, float(wt_scale), int(wt_zp), torch.quint8
        )
        qprelu.set_weight(qweight)
        return qprelu

    @classmethod
    def from_reference(cls, mod, scale, zero_point):
        qprelu = cls(float(scale), int(zero_point), mod.num_parameters)
        float_wt = mod.weight.float()
        observer = mod.qconfig.weight()
        observer(float_wt)
        if observer.dtype != torch.quint8:
            warn(
                f"PReLU's weight observer should have dtype quint8 but got {observer.dtype}"
            )
        wt_scale, wt_zp = observer.calculate_qparams()
        qweight = torch.quantize_per_tensor(
            float_wt, float(wt_scale), int(wt_zp), torch.quint8
        )
        qprelu.set_weight(qweight)
        return qprelu