"""
Specialization of einops for torch.

Unfortunately, torch's jit scripting mechanism isn't strong enough,
and to have scripting supported at least for layers,
a number of additional moves are needed.

Design of main operations (dynamic resolution by lookup) is unlikely
to be implemented by torch.jit.script,
but torch.compile seems to work with operations just fine.
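
For example, a minimal sketch (assumes pytorch >= 2.0, where torch.compile and
torch._dynamo are available; importing this module registers einops operations
with torchdynamo via allow_ops_in_compiled_graph below):

    import torch
    from einops import rearrange

    @torch.compile
    def merge_heads(x: torch.Tensor) -> torch.Tensor:
        return rearrange(x, "b h t d -> b t (h d)")

    merge_heads(torch.randn(2, 4, 8, 16)).shape  # -> torch.Size([2, 8, 64])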
    N)DictListTuple)TransformRecipe _reconstruct_from_shape_uncachedc                   R   e Zd ZdZedej        dedee	         fd            Z
edee	         fd            Zedeej                 fd	            Zed
ee	         fd            Zede	dee	e	f         fd            Zed             Zed             Zedee	         fd            ZdS )TorchJitBackendz
    Completely static backend that mimics part of normal backend functionality
    but restricted to be within torchscript.

    @staticmethod
    def reduce(x: torch.Tensor, operation: str, reduced_axes: List[int]):
        if operation == "min":
            return x.amin(dim=reduced_axes)
        elif operation == "max":
            return x.amax(dim=reduced_axes)
        elif operation == "sum":
            return x.sum(dim=reduced_axes)
        elif operation == "mean":
            return x.mean(dim=reduced_axes)
        elif operation == "prod":
            # prod has no multi-axis variant: reduce one axis at a time, innermost first
            for i in list(sorted(reduced_axes))[::-1]:
                x = x.prod(dim=i)
            return x
        else:
            raise NotImplementedError("Unknown reduction ", operation)

    @staticmethod
    def transpose(x, axes: List[int]):
        return x.permute(axes)

    @staticmethod
    def stack_on_zeroth_dimension(tensors: List[torch.Tensor]):
        return torch.stack(tensors)

    @staticmethod
    def tile(x, repeats: List[int]):
        return x.repeat(repeats)

    @staticmethod
    def add_axes(x, n_axes: int, pos2len: Dict[int, int]):
        repeats = [-1] * n_axes
        for axis_position, axis_length in pos2len.items():
            x = torch.unsqueeze(x, axis_position)
            repeats[axis_position] = axis_length
        return x.expand(repeats)

    @staticmethod
    def is_float_type(x):
        return x.dtype in [torch.float16, torch.float32, torch.float64, torch.bfloat16]

    @staticmethod
    def shape(x):
        return x.shape

    @staticmethod
    def reshape(x, shape: List[int]):
        return x.reshape(shape)


def apply_for_scriptable_torch(
    recipe: TransformRecipe, tensor: torch.Tensor, reduction_type: str, axes_dims: List[Tuple[str, int]]
) -> torch.Tensor:
    # Executes a precomputed TransformRecipe using only the static TorchJitBackend,
    # so the whole transform stays compatible with torch.jit.script.
    backend = TorchJitBackend
    (
        init_shapes,
        axes_reordering,
        reduced_axes,
        added_axes,
        final_shapes,
        n_axes_w_added,
    ) = _reconstruct_from_shape_uncached(recipe, backend.shape(tensor), axes_dims=axes_dims)
    if init_shapes is not None:
        tensor = backend.reshape(tensor, init_shapes)
    if axes_reordering is not None:
        tensor = backend.transpose(tensor, axes_reordering)
    if len(reduced_axes) > 0:
        tensor = backend.reduce(tensor, operation=reduction_type, reduced_axes=reduced_axes)
    if len(added_axes) > 0:
        tensor = backend.add_axes(tensor, n_axes=n_axes_w_added, pos2len=added_axes)
    if final_shapes is not None:
        tensor = backend.reshape(tensor, final_shapes)
    return tensor


def allow_ops_in_compiled_graph():
    if hasattr(torch, "__version__") and torch.__version__[0] < "2":
        # torch._dynamo and torch.compile appear in pytorch 2.0
        return
    try:
        from torch._dynamo import allow_in_graph
    except ImportError:
        warnings.warn("allow_ops_in_compiled_graph failed to import torch: ensure pytorch >=2.0", ImportWarning)
        return

    from .einops import rearrange, reduce, repeat, einsum
    from .packing import pack, unpack

    # torchdynamo puts these calls into the graph directly instead of tracing into them
    allow_in_graph(rearrange)
    allow_in_graph(reduce)
    allow_in_graph(repeat)
    allow_in_graph(einsum)
    allow_in_graph(pack)
    allow_in_graph(unpack)

    global _ops_were_registered_in_torchdynamo
    _ops_were_registered_in_torchdynamo = True


# registration with torchdynamo happens on import of this module
allow_ops_in_compiled_graph()
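

# A minimal smoke test of layer scripting, the use case TorchJitBackend and
# apply_for_scriptable_torch exist for. This is a sketch: it assumes
# einops.layers.torch is importable and torchscript is available; it is guarded
# so it never runs on import.
if __name__ == "__main__":
    from einops.layers.torch import Rearrange

    scripted_layer = torch.jit.script(Rearrange("b c h w -> b (c h w)"))
    print(scripted_layer(torch.randn(2, 3, 4, 5)).shape)  # expected: torch.Size([2, 60])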