
import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache

import torch
from packaging import version
from packaging.version import parse

from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version


try:
    import torch_xla.core.xla_model as xm  # noqa: F401

    _tpu_available = True
except ImportError:
    _tpu_available = False


# Cache this result, as it is a C FFI call which can be fairly costly.
_torch_distributed_available = torch.distributed.is_available()


def _is_package_available(pkg_name):
    # Check we're not importing a "pkg_name" directory somewhere, but the actual library, by grabbing its metadata.
    package_exists = importlib.util.find_spec(pkg_name) is not None
    if package_exists:
        try:
            _ = importlib.metadata.metadata(pkg_name)
            return True
        except importlib.metadata.PackageNotFoundError:
            return False


def is_torch_distributed_available() -> bool:
    return _torch_distributed_available


def is_ccl_available():
    try:
        pass
    except ImportError:
        print(
            "Intel(R) oneCCL Bindings for PyTorch* is required to run DDP on Intel(R) GPUs, but it is not"
            " detected. If you see \"ValueError: Invalid backend: 'ccl'\" error, please install Intel(R) oneCCL"
            " Bindings for PyTorch*."
        )
    return (
        importlib.util.find_spec("torch_ccl") is not None
        or importlib.util.find_spec("oneccl_bindings_for_pytorch") is not None
    )


def get_ccl_version():
    return importlib.metadata.version("oneccl_bind_pt")


def is_fp8_available():
    return _is_package_available("transformer_engine")


def is_cuda_available():
    """
    Checks if `cuda` is available via an `nvml-based` check which won't trigger the drivers and leave cuda
    uninitialized.
    """
    try:
        os.environ["PYTORCH_NVML_BASED_CUDA_CHECK"] = str(1)
        available = torch.cuda.is_available()
    finally:
        os.environ.pop("PYTORCH_NVML_BASED_CUDA_CHECK", None)
    return available


@lru_cache
def is_tpu_available(check_device=True):
    "Checks if `torch_xla` is installed and potentially if a TPU is in the environment"
    if is_cuda_available():
        return False
    if check_device:
        if _tpu_available:
            try:
                # Will raise a RuntimeError if no XLA configuration is found
                _ = xm.xla_device()
                return True
            except RuntimeError:
                return False
    return _tpu_available


def is_deepspeed_available():
    return _is_package_available("deepspeed")


def is_bf16_available(ignore_tpu=False):
    "Checks if bf16 is supported, optionally ignoring the TPU"
    if is_tpu_available():
        return not ignore_tpu
    if torch.cuda.is_available():
        return torch.cuda.is_bf16_supported()
    return True


def is_4bit_bnb_available():
    package_exists = _is_package_available("bitsandbytes")
    if package_exists:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", "0.39.0")
    return False


def is_8bit_bnb_available():
    package_exists = _is_package_available("bitsandbytes")
    if package_exists:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", "0.37.2")
    return False


def is_bnb_available():
    return _is_package_available("bitsandbytes")


def is_megatron_lm_available():
    if str_to_bool(os.environ.get("ACCELERATE_USE_MEGATRON_LM", "False")) == 1:
        package_exists = importlib.util.find_spec("megatron") is not None
        if package_exists:
            try:
                megatron_version = parse(importlib.metadata.version("megatron-lm"))
                return compare_versions(megatron_version, ">=", "2.2.0")
            except Exception as e:
                warnings.warn(f"Parse Megatron version failed. Exception:{e}")
                return False


def is_safetensors_available():
    return _is_package_available("safetensors")


def is_transformers_available():
    return _is_package_available("transformers")


def is_datasets_available():
    return _is_package_available("datasets")


def is_timm_available():
    return _is_package_available("timm")


def is_aim_available():
    package_exists = _is_package_available("aim")
    if package_exists:
        aim_version = version.parse(importlib.metadata.version("aim"))
        return compare_versions(aim_version, "<", "4.0.0")
    return False


def is_tensorboard_available():
    return _is_package_available("tensorboard") or _is_package_available("tensorboardX")


def is_wandb_available():
    return _is_package_available("wandb")


def is_comet_ml_available():
    return _is_package_available("comet_ml")


def is_boto3_available():
    return _is_package_available("boto3")


def is_rich_available():
    if _is_package_available("rich"):
        if "ACCELERATE_DISABLE_RICH" in os.environ:
            warnings.warn(
                "`ACCELERATE_DISABLE_RICH` is deprecated and will be removed in v0.22.0 and deactivated by default."
                " Please use `ACCELERATE_ENABLE_RICH` if you wish to use `rich`."
            )
            return not parse_flag_from_env("ACCELERATE_DISABLE_RICH", False)
        return parse_flag_from_env("ACCELERATE_ENABLE_RICH", False)
    return False


def is_sagemaker_available():
    return _is_package_available("sagemaker")


def is_tqdm_available():
    return _is_package_available("tqdm")


def is_mlflow_available():
    if _is_package_available("mlflow"):
        return True

    if importlib.util.find_spec("mlflow") is not None:
        try:
            _ = importlib.metadata.metadata("mlflow-skinny")
            return True
        except importlib.metadata.PackageNotFoundError:
            return False
    return False


def is_mps_available():
    return is_torch_version(">=", "1.12") and torch.backends.mps.is_available() and torch.backends.mps.is_built()


def is_ipex_available():
    def get_major_and_minor_from_version(full_version):
        return str(version.parse(full_version).major) + "." + str(version.parse(full_version).minor)

    _torch_version = importlib.metadata.version("torch")
    if importlib.util.find_spec("intel_extension_for_pytorch") is None:
        return False
    _ipex_version = "N/A"
    try:
        _ipex_version = importlib.metadata.version("intel_extension_for_pytorch")
    except importlib.metadata.PackageNotFoundError:
        return False
    torch_major_and_minor = get_major_and_minor_from_version(_torch_version)
    ipex_major_and_minor = get_major_and_minor_from_version(_ipex_version)
    if torch_major_and_minor != ipex_major_and_minor:
        warnings.warn(
            f"Intel Extension for PyTorch {ipex_major_and_minor} needs to work with PyTorch {torch_major_and_minor}.*,"
            f" but PyTorch {_torch_version} is found. Please switch to the matching version and run again."
        )
        return False
    return True


@lru_cache
def is_npu_available(check_device=False):
    "Checks if `torch_npu` is installed and potentially if a NPU is in the environment"
    if importlib.util.find_spec("torch") is None or importlib.util.find_spec("torch_npu") is None:
        return False

    import torch
    import torch_npu  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no NPU is found
            _ = torch.npu.device_count()
            return torch.npu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "npu") and torch.npu.is_available()


@lru_cache
def is_xpu_available(check_device=False):
    "check if user disables it explicitly"
    if not parse_flag_from_env("ACCELERATE_USE_XPU", default=True):
        return False
    if is_ipex_available():
        import torch

        if is_torch_version("<=", "1.12"):
            return False
    else:
        return False

    import intel_extension_for_pytorch  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no XPU is found
            _ = torch.xpu.device_count()
            return torch.xpu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "xpu") and torch.xpu.is_available()