
import torch
import torch.nn.functional as F
from torch import nn

from ..utils import deprecate
from ..utils.import_utils import is_torch_npu_available


if is_torch_npu_available():
    import torch_npu

ACTIVATION_FUNCTIONS = {
    "swish": nn.SiLU(),
    "silu": nn.SiLU(),
    "mish": nn.Mish(),
    "gelu": nn.GELU(),
    "relu": nn.ReLU(),
}


def get_activation(act_fn: str) -> nn.Module:
    """Helper function to get activation function from string.

    Args:
        act_fn (str): Name of activation function.

    Returns:
        nn.Module: Activation function.
    """

    act_fn = act_fn.lower()
    if act_fn in ACTIVATION_FUNCTIONS:
        return ACTIVATION_FUNCTIONS[act_fn]
    else:
        raise ValueError(f"Unsupported activation function: {act_fn}")
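

# A minimal usage sketch for the lookup above. The `_example_*` helpers in this file are
# hypothetical illustrations added for clarity; they are not part of the diffusers API.
def _example_get_activation() -> torch.Tensor:
    act = get_activation("silu")  # returns the shared nn.SiLU() instance from ACTIVATION_FUNCTIONS
    x = torch.randn(2, 4)
    return act(x)  # element-wise SiLU; output shape matches the input, here (2, 4)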


class FP32SiLU(nn.Module):
    r"""
    SiLU activation function with input upcasted to torch.float32.
    """

    def __init__(self):
        super().__init__()

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        return F.silu(inputs.float(), inplace=False).to(inputs.dtype)


class GELU(nn.Module):
    r"""
    GELU activation function with tanh approximation support with `approximate="tanh"`.

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        approximate (`str`, *optional*, defaults to `"none"`): If `"tanh"`, use tanh approximation.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, approximate: str = "none", bias: bool = True):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out, bias=bias)
        self.approximate = approximate

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        if gate.device.type != "mps":
            return F.gelu(gate, approximate=self.approximate)
        # mps: gelu is not implemented for float16
        return F.gelu(gate.to(dtype=torch.float32), approximate=self.approximate).to(dtype=gate.dtype)

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)
        hidden_states = self.gelu(hidden_states)
        return hidden_states


class GEGLU(nn.Module):
    r"""
    A [variant](https://arxiv.org/abs/2002.05202) of the gated linear unit activation function.

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, bias: bool = True):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out * 2, bias=bias)

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        if gate.device.type != "mps":
            return F.gelu(gate)
        # mps: gelu is not implemented for float16
        return F.gelu(gate.to(dtype=torch.float32)).to(dtype=gate.dtype)

    def forward(self, hidden_states, *args, **kwargs):
        if len(args) > 0 or kwargs.get("scale", None) is not None:
            deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`."
            deprecate("scale", "1.0.0", deprecation_message)
        hidden_states = self.proj(hidden_states)
        if is_torch_npu_available():
            # using torch_npu.npu_geglu can run faster and save memory on NPU.
            return torch_npu.npu_geglu(hidden_states, dim=-1, approximate=1)[0]
        else:
            hidden_states, gate = hidden_states.chunk(2, dim=-1)
            return hidden_states * self.gelu(gate)
S )ApproximateGELUa  
    The approximate form of the Gaussian Error Linear Unit (GELU). For more details, see section 2 of this
    [paper](https://arxiv.org/abs/1606.08415).

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, bias: bool = True):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out, bias=bias)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.proj(x)
        return x * torch.sigmoid(1.702 * x)
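

# Hypothetical sketch, not part of the original module: ApproximateGELU applies the
# sigmoid form x * sigmoid(1.702 * x) after its projection, a cheap stand-in for the
# exact (erf-based) GELU described in the paper referenced above.
def _example_approximate_gelu() -> torch.Tensor:
    layer = ApproximateGELU(dim_in=8, dim_out=8)
    x = torch.randn(2, 8)
    y = layer(x)  # proj(x) * sigmoid(1.702 * proj(x))
    h = layer.proj(x)  # same projection, recomputed to verify by hand
    assert torch.allclose(y, h * torch.sigmoid(1.702 * h))
    return y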