"""
Debug utilities for TorchDynamo compilation and execution.

This module provides various debugging tools and utilities for TorchDynamo, including:

- Minification support for reducing test cases while preserving bugs
- Input/output handling via InputReader and InputWriter for reproducible testing
- Accuracy checking between original and compiled models
- Neural network module string conversion via NNModuleToString
- Profiling tools and system information collection
- Buck build system integration for Meta-internal testing

Key classes:
- InputReader/InputWriter: Handle serialization of model inputs/outputs
- NNModuleToString: Converts nn.Modules to string representations
- BuckTargetWriter: Manages Buck build system integration
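
Illustrative sketch (``gm`` and ``opt_gm`` are hypothetical graph modules, as
produced during a Dynamo/Inductor compilation; the inputs are made up)::

    import torch
    from torch._dynamo.debug_utils import InputWriter, same_two_models

    example_inputs = [torch.randn(4, 4)]

    # Record the example inputs so a standalone repro script can reload them.
    writer = InputWriter(save_dir=None)
    writer.tensor("arg0", example_inputs[0])
    for line in writer.lines():
        print(line)

    # Compare eager vs. compiled outputs (and gradients) on the same inputs.
    # passing = same_two_models(gm, opt_gm, example_inputs)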
"""

from __future__ import annotations

import atexit
import copy
import cProfile
import functools
import getpass
import inspect
import itertools
import logging
import os
import re
import subprocess
import sys
import tempfile
import textwrap
from collections import Counter
from importlib import import_module
from typing import Any, Callable, Optional, TYPE_CHECKING, TypeVar

import torch
import torch._prims_common
import torch._subclasses.meta_utils
from torch import Tensor
from torch._dynamo.testing import rand_strided
from torch._inductor.cpp_builder import normalize_path_separator
from torch.multiprocessing.reductions import StorageWeakRef
from torch.utils._content_store import ContentStoreReader, ContentStoreWriter

from . import config
from .utils import clone_inputs, get_debug_dir

if TYPE_CHECKING:
    from collections.abc import Sequence

    from torch.hub import tqdm
    from torch.storage import UntypedStorage


log = logging.getLogger(__name__)

T = TypeVar("T")

inductor_config = import_module("torch._inductor.config")
use_buck = inductor_config.is_fbcode()

extra_deps = []
extra_imports = ""
if use_buck:
    import libfb.py.build_info

    extra_deps = [
        "//caffe2/torch/fb/sparsenn:sparsenn_operators_gpu",
        "//caffe2/torch/fb/sparsenn:sparsenn_operators",
        "//deeplearning/fbgemm/fbgemm_gpu:sparse_ops_cpu",
        "//deeplearning/fbgemm/fbgemm_gpu:sparse_ops",
    ]
    extra_imports = "\n".join(f'torch.ops.load_library("{x}")' for x in extra_deps)

BUCK_CMD_PREFIX = ["buck2", "run", "@mode/dev-nosan"]


class BuckTargetWriter:
    def __init__(self, filename: str) -> None: ...

    def build(self) -> str: ...

    def write(self, print_msg: bool = True) -> list[str]: ...


def minifier_dir() -> str: ...


MAX_CONSTANT_NUMEL_INLINE = 4


class NNModuleToString:
    safe_reprs = [
        torch.nn.Linear, torch.nn.Conv1d, torch.nn.Conv2d, torch.nn.Conv3d,
        torch.nn.BatchNorm1d, torch.nn.BatchNorm2d, torch.nn.BatchNorm3d,
        torch.nn.LayerNorm, torch.nn.Dropout, torch.nn.Softmax, torch.nn.ReLU,
        torch.nn.GELU, torch.nn.Identity, torch.nn.MaxPool2d, torch.nn.Embedding,
        torch.nn.Tanh, torch.nn.ConvTranspose1d, torch.nn.GLU, torch.nn.LSTM,
        torch.nn.Flatten, torch.nn.AdaptiveAvgPool2d,
    ]

    @staticmethod
    def can_convert_to_string(gm: torch.fx.GraphModule) -> bool: ...

    @staticmethod
    def convert(gm: torch.fx.GraphModule) -> str: ...


def _cuda_system_info_comment() -> str: ...


def generate_env_vars_string(*, stable_output: bool = False) -> str:
    """
    Generate a string configuration for environment variables related to Dynamo, Inductor, and Triton.
    """
    ...


def generate_config_string(*, stable_output: bool = False) -> str: ...


def get_minifier_repro_path() -> str: ...


def helper_for_dump_minify(contents: str) -> None: ...


class AccuracyError(Exception):
    pass


def clone_inputs_retaining_gradness(example_inputs: Sequence[Any]) -> list[Any]:
    """
    This clone inputs is different from utils clone_input. In case of minifier,
    all the tensors are leaf tensors while creating a new graph. So, we set the
    requires_grad field w/o checking the leafness of the tensor.
    """
    ...


def run_fwd_maybe_bwd(
    gm: torch.fx.GraphModule,
    args: Any,
    only_fwd: bool = False,
    disable_clone: bool = False,
) -> Any:
    """
    Runs a forward and possibly backward iteration for a given mod and args.

    When disable_clone is True, we will use args as-is without cloning.
    This is higher fidelity but we may destroy the args in the process.
    """
    ...


def same_two_models(
    gm: torch.fx.GraphModule,
    opt_gm: torch.fx.GraphModule,
    example_inputs,
    only_fwd: bool = False,
    *,
    require_fp64: bool = False,
    ignore_non_fp: bool = False,
) -> bool:
    """
    Check two models have same accuracy.

    require_fp64: if True, raise an error if we unable to calculate the fp64 reference
    ignore_non_fp: if True, do not compare outputs which are not floating point.  This
        is mostly useful for the minifier (which wants to avoid quantizing floating point
        error into integer/boolean error)
    """
    ...


def cast_dtype_args_to_fp64(model: torch.fx.GraphModule) -> torch.fx.GraphModule: ...


def cast_to(
    dtype: torch.dtype, model: torch.fx.GraphModule, inputs: list[Any]
) -> tuple[torch.fx.GraphModule, list[Any]]: ...


def cast_to_fp64(
    model: torch.fx.GraphModule, inputs: list[Any]
) -> tuple[torch.fx.GraphModule, list[Any]]: ...


def backend_accuracy_fails(
    gm: torch.fx.GraphModule,
    example_inputs,
    compiler_fn: Callable[[torch.fx.GraphModule, list[Any]], torch.fx.GraphModule],
    only_fwd: bool = False,
    *,
    require_fp64: bool = False,
    ignore_non_fp: bool = False,
) -> bool: ...


def _stride_or_default(
    stride: Optional[torch._prims_common.StrideType],
    *,
    shape: torch._prims_common.ShapeType,
) -> torch._prims_common.StrideType: ...


def _mk_defaulter(d: T) -> Callable[[Optional[T]], T]:
    return lambda x: x if x is not None else d


_dtype_or_default = _mk_defaulter(torch.float32)
_device_or_default = _mk_defaulter(torch.device("cpu"))
_storage_offset_or_default = _mk_defaulter(0)
_requires_grad_or_default = _mk_defaulter(False)
_is_leaf_or_default = _mk_defaulter(False)


class NopInputReader:
    def __init__(self) -> None: ...

    def storage(
        self,
        storage_hash: Optional[str],
        nbytes: int,
        *,
        device=None,
        dtype_hint: Optional[torch.dtype] = None,
    ) -> None: ...

    def tensor(self, *args: Any, **kwargs: Any) -> None: ...

    def symint(self, *args: Any, **kwargs: Any) -> None: ...


class InputReader:
    def __init__(self, save_dir: Optional[str] = None, *, pbar: Optional[tqdm] = None) -> None: ...

    def storage(
        self,
        storage_hash: Optional[str],
        nbytes: int,
        *,
        device=None,
        dtype_hint: Optional[torch.dtype] = None,
    ): ...

    def tensor(
        self,
        storage,
        shape,
        stride=None,
        *,
        storage_offset: Optional[int] = None,
        dtype: Optional[torch.dtype] = None,
        requires_grad: Optional[bool] = None,
        is_leaf: Optional[bool] = None,
        **metadata: Any,
    ) -> torch.Tensor: ...

    def symint(self, val: Any) -> Any: ...


class InputWriter:
    def __init__(self, save_dir: Optional[str], *, stable_hash: bool = False) -> None: ...

    def lines(self) -> list[str]: ...

    def storage(
        self, untyped_storage, *, device_hint=None, dtype_hint: Optional[torch.dtype] = None
    ) -> str: ...

    def tensor(self, name: str, t: torch.Tensor) -> None: ...

    def unsupported(self, name: str, arg: Any) -> None: ...

    def const(self, name: str) -> None: ...

    def symint(self, name: str, val: Any) -> None: ...


def aot_graph_input_parser(
    func: Callable[[list[Tensor]], list[Tensor]],
    device: str = "cuda",
    sym_shapes: Optional[dict[str, int]] = None,
    default_sym_shape: Optional[int] = None,
) -> dict[str, Any]:
    """
    Takes in a function which has been printed with print_readable() and constructs kwargs to run it.

    Handles Tensor inputs, Symints, and a graph module which might have tensor constants.

    Consider a function `forward` defined as follows:

    def forward(self, primals_1: "f32[1001, 6]", primals_2: "f32[s0]", primals_3: "Sym(s0)",):
        _tensor_constant0: "i64[4190]" = self._tensor_constant0
        # Further implementation

    kwargs = aot_graph_input_parser(forward)
    forward(**kwargs)
    """
    ...


def profile_to_file(filename: str) -> Callable[[T], T]:
    """
    Decorator to cProfile a given function and save the result to disk on process exit.

    Args:
        filename: filename to save profile to
    """
    ...