
     i"P                    J   S SK Jr  S SKrS SKrS SKJrJr  S SKrS SK	J
r
  \R                  " \5      r\(       a  S SKrS SKr          S                       S	S jjr          S                       S
S jjr  S         SS jjrg)    )annotationsN)TYPE_CHECKINGLiteral)Tensorc                   SSK nUb  Ub  [        S5      eUc  Uc  [        S5      eUc  US;   aH  U	(       a   UR                  " UR                  S   5      nOtUR                  " UR                  S   S5      nOSUS:X  aM  U	(       a#  UR
                  " UR                  S   S	-  5      nO#UR                  " UR                  S   S	-  S5      nUR                  U5        SnUnU R                  [        R                  [        R                  4;  a6  U(       a"  US
:w  a  U nX-  nO[        R                  S5        [        U UUUS9n OU(       a  [        R                  S5        [        R                  " 5       nUR!                  X5      u  nnUGb)  [        R"                  " U VVs/ s H3  nU Vs/ s H"  nUR%                  UR'                  5       5      PM$     snPM5     snn5      nUS:X  a)  [        R(                  " USS9R+                  [,        5      nOUR+                  [,        5      n[        R.                  " SUU5      n[        R0                  " U* 5      SS2SU24   nU[        R2                  " [5        U 5      5      SS2S4   U4   nU[        R2                  " [5        U 5      5      SS2S4   U4   n[        R                  " 5       U-
  n[7        [5        U 5      5       VVVs/ s H@  n[9        UU   UU   5       VVs/ s H  u  nn[-        U5      [;        U5      S.PM     snnPMB     snnnU4nU
(       a  / UQUP7nU$ s  snf s  snnf s  snnf s  snnnf )aF  
Performs semantic search using the FAISS library.

Rescoring will be performed if:
1. `rescore` is True
2. The query embeddings are not quantized
3. The corpus is quantized, i.e. the corpus precision is not float32
Only if these conditions are true, will we search for `top_k * rescore_multiplier` samples and then rescore to only
keep `top_k`.

Args:
    query_embeddings: Embeddings of the query sentences. Ideally not
        quantized to allow for rescoring.
    corpus_embeddings: Embeddings of the corpus sentences. Either
        `corpus_embeddings` or `corpus_index` should be used, not
        both. The embeddings can be quantized to "int8" or "binary"
        for more efficient search.
    corpus_index: FAISS index for the corpus sentences. Either
        `corpus_embeddings` or `corpus_index` should be used, not
        both.
    corpus_precision: Precision of the corpus embeddings. The
        options are "float32", "uint8", or "ubinary". Default is
        "float32".
    top_k: Number of top results to retrieve. Default is 10.
    ranges: Ranges for quantization of embeddings. This is only used
        for int8 quantization, where the ranges refer to the
        minimum and maximum values for each dimension. So, it's a 2D
        array with shape (2, embedding_dim). Default is None, which
        means that the ranges will be calculated from the
        calibration embeddings.
    calibration_embeddings: Embeddings used for calibration during
        quantization. This is only used for int8 quantization, where
        the calibration embeddings can be used to compute ranges,
        i.e. the minimum and maximum values for each dimension.
        Default is None, which means that the ranges will be
        calculated from the query embeddings. This is not
        recommended.
    rescore: Whether to perform rescoring. Note that rescoring still
        will only be used if the query embeddings are not quantized
        and the corpus is quantized, i.e. the corpus precision is
        not "float32". Default is True.
    rescore_multiplier: Oversampling factor for rescoring. The code
        will search `top_k * rescore_multiplier` samples and
        then rescore to only keep `top_k`. Default is 2.
    exact: Whether to use exact search or approximate search.
        Default is True.
    output_index: Whether to output the FAISS index used for the
        search. Default is False.

Returns:
    A tuple containing a list of search results and the time taken
    for the search. If `output_index` is True, the tuple will also
    contain the FAISS index used for the search.

Raises:
    ValueError: If both `corpus_embeddings` and `corpus_index` are
        provided or if neither is provided.
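
Example:
    A minimal, illustrative usage sketch (not a strict API contract): it assumes that
    `faiss` is installed and uses "all-MiniLM-L6-v2" purely as an example model name::

        from sentence_transformers import SentenceTransformer
        from sentence_transformers.quantization import quantize_embeddings, semantic_search_faiss

        model = SentenceTransformer("all-MiniLM-L6-v2")
        corpus = [
            "The weather is lovely today.",
            "It's so sunny outside!",
            "He drove to the stadium.",
            "She walked to the park.",
        ]
        corpus_embeddings = quantize_embeddings(model.encode(corpus), precision="ubinary")
        query_embeddings = model.encode(["How is the weather?"])

        results, search_time = semantic_search_faiss(
            query_embeddings,
            corpus_embeddings=corpus_embeddings,
            corpus_precision="ubinary",
            top_k=2,
        )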

The list of search results is in the format: [[{"corpus_id": int, "score": float}, ...], ...]
The time taken for the search is a float value.
    """
    import faiss

    if corpus_embeddings is not None and corpus_index is not None:
        raise ValueError("Only corpus_embeddings or corpus_index should be used, not both.")
    if corpus_embeddings is None and corpus_index is None:
        raise ValueError("Either corpus_embeddings or corpus_index should be used.")

    # If no index was passed, build one from the (possibly quantized) corpus embeddings
    if corpus_index is None:
        if corpus_precision in ("float32", "uint8"):
            if exact:
                corpus_index = faiss.IndexFlatIP(corpus_embeddings.shape[1])
            else:
                corpus_index = faiss.IndexHNSWFlat(corpus_embeddings.shape[1], 16)
        elif corpus_precision == "ubinary":
            if exact:
                corpus_index = faiss.IndexBinaryFlat(corpus_embeddings.shape[1] * 8)
            else:
                corpus_index = faiss.IndexBinaryHNSW(corpus_embeddings.shape[1] * 8, 16)
        corpus_index.add(corpus_embeddings)

    # If rescoring is enabled and the queries are still float32 while the corpus is quantized,
    # keep the original queries around and oversample the search by `rescore_multiplier`
    rescore_embeddings = None
    k = top_k
    if query_embeddings.dtype not in (np.uint8, np.int8):
        if rescore:
            if corpus_precision != "float32":
                rescore_embeddings = query_embeddings
                k *= rescore_multiplier
            else:
                logger.warning(
                    "Rescoring is enabled but the corpus is not quantized. Either pass `rescore=False` or "
                    'quantize the corpus embeddings with `quantize_embeddings(embeddings, precision="...")` '
                    'and pass `corpus_precision="..."` to `semantic_search_faiss`.'
                )
        query_embeddings = quantize_embeddings(
            query_embeddings,
            precision=corpus_precision,
            ranges=ranges,
            calibration_embeddings=calibration_embeddings,
        )
    elif rescore:
        logger.warning(
            "Rescoring is enabled but the query embeddings are quantized. Either pass `rescore=False` or "
            "don't quantize the query embeddings."
        )

    # Search the FAISS index with the (possibly oversampled) number of candidates
    start_t = time.time()
    scores, indices = corpus_index.search(query_embeddings, k)

    # Rescore the oversampled candidates against the original float32 queries
    if rescore_embeddings is not None:
        top_k_embeddings = np.array(
            [[corpus_index.reconstruct(idx.item()) for idx in query_indices] for query_indices in indices]
        )
        if corpus_precision == "ubinary":
            top_k_embeddings = np.unpackbits(top_k_embeddings, axis=-1).astype(int)
        else:
            top_k_embeddings = top_k_embeddings.astype(int)

        # rescore_embeddings: [num_queries, embedding_dim], top_k_embeddings: [num_queries, k, embedding_dim]
        rescored_scores = np.einsum("ij,ikj->ik", rescore_embeddings, top_k_embeddings)
        rescored_indices = np.argsort(-rescored_scores)[:, :top_k]
        indices = indices[np.arange(len(query_embeddings))[:, None], rescored_indices]
        scores = rescored_scores[np.arange(len(query_embeddings))[:, None], rescored_indices]

    delta_t = time.time() - start_t

    outputs = (
        [
            [
                {"corpus_id": int(neighbor), "score": float(score)}
                for score, neighbor in zip(scores[query_id], indices[query_id])
            ]
            for query_id in range(len(query_embeddings))
        ],
        delta_t,
    )
    if output_index:
        outputs = (*outputs, corpus_index)

    return outputs


def semantic_search_usearch(
    query_embeddings: np.ndarray,
    corpus_embeddings: np.ndarray | None = None,
    corpus_index: usearch.index.Index | None = None,
    corpus_precision: Literal["float32", "int8", "binary"] = "float32",
    top_k: int = 10,
    ranges: np.ndarray | None = None,
    calibration_embeddings: np.ndarray | None = None,
    rescore: bool = True,
    rescore_multiplier: int = 2,
    exact: bool = True,
    output_index: bool = False,
) -> tuple[list[list[dict[str, int | float]]], float, usearch.index.Index]:
    """
Performs semantic search using the usearch library.

Rescoring will be performed if:
1. `rescore` is True
2. The query embeddings are not quantized
3. The corpus is quantized, i.e. the corpus precision is not float32
Only if these conditions are true, will we search for `top_k * rescore_multiplier` samples and then rescore to only
keep `top_k`.

Args:
    query_embeddings: Embeddings of the query sentences. Ideally not
        quantized to allow for rescoring.
    corpus_embeddings: Embeddings of the corpus sentences. Either
        `corpus_embeddings` or `corpus_index` should be used, not
        both. The embeddings can be quantized to "int8" or "binary"
        for more efficient search.
    corpus_index: usearch index for the corpus sentences. Either
        `corpus_embeddings` or `corpus_index` should be used, not
        both.
    corpus_precision: Precision of the corpus embeddings. The
        options are "float32", "int8", "ubinary", or "binary". Default
        is "float32".
    top_k: Number of top results to retrieve. Default is 10.
    ranges: Ranges for quantization of embeddings. This is only used
        for int8 quantization, where the ranges refer to the
        minimum and maximum values for each dimension. So, it's a 2D
        array with shape (2, embedding_dim). Default is None, which
        means that the ranges will be calculated from the
        calibration embeddings.
    calibration_embeddings: Embeddings used for calibration during
        quantization. This is only used for int8 quantization, where
        the calibration embeddings can be used to compute ranges,
        i.e. the minimum and maximum values for each dimension.
        Default is None, which means that the ranges will be
        calculated from the query embeddings. This is not
        recommended.
    rescore: Whether to perform rescoring. Note that rescoring still
        will only be used if the query embeddings are not quantized
        and the corpus is quantized, i.e. the corpus precision is
        not "float32". Default is True.
    rescore_multiplier: Oversampling factor for rescoring. The code
        will search `top_k * rescore_multiplier` samples and
        then rescore to only keep `top_k`. Default is 2.
    exact: Whether to use exact search or approximate search.
        Default is True.
    output_index: Whether to output the usearch index used for the
        search. Default is False.

Returns:
    A tuple containing a list of search results and the time taken
    for the search. If `output_index` is True, the tuple will also
    contain the usearch index used for the search.

Raises:
    ValueError: If both `corpus_embeddings` and `corpus_index` are
        provided or if neither is provided.

The list of search results is in the format: [[{"corpus_id": int, "score": float}, ...], ...]
The time taken for the search is a float value.
    """
    from usearch.compiled import ScalarKind
    from usearch.index import Index

    if corpus_embeddings is not None and corpus_index is not None:
        raise ValueError("Only corpus_embeddings or corpus_index should be used, not both.")
    if corpus_embeddings is None and corpus_index is None:
        raise ValueError("Either corpus_embeddings or corpus_index should be used.")
    if corpus_precision not in ("float32", "int8", "ubinary", "binary"):
        raise ValueError('corpus_precision must be "float32", "int8", "ubinary", "binary" for usearch')

    # If no index was passed, build one from the (possibly quantized) corpus embeddings
    if corpus_index is None:
        if corpus_precision == "float32":
            corpus_index = Index(
                ndim=corpus_embeddings.shape[1],
                metric="cos",
                dtype="f32",
            )
        elif corpus_precision == "int8":
            corpus_index = Index(
                ndim=corpus_embeddings.shape[1],
                metric="ip",
                dtype="i8",
            )
        elif corpus_precision == "ubinary":
            corpus_index = Index(
                ndim=corpus_embeddings.shape[1] * 8,
                metric="hamming",
                dtype="b1",
            )
        elif corpus_precision == "binary":
            corpus_index = Index(
                ndim=corpus_embeddings.shape[1] * 8,
                metric="hamming",
                dtype="b1",
            )
        corpus_index.add(np.arange(len(corpus_embeddings)), corpus_embeddings)

    # If rescoring is enabled and the queries are still float32 while the corpus is quantized,
    # keep the original queries around and oversample the search by `rescore_multiplier`
    rescore_embeddings = None
    k = top_k
    if query_embeddings.dtype not in (np.int8, np.uint8):
        if rescore:
            if corpus_index.dtype != ScalarKind.F32:
                rescore_embeddings = query_embeddings
                k *= rescore_multiplier
            else:
                logger.warning(
                    "Rescoring is enabled but the corpus is not quantized. Either pass `rescore=False` or "
                    'quantize the corpus embeddings with `quantize_embeddings(embeddings, precision="...")` '
                    'and pass `corpus_precision="..."` to `semantic_search_usearch`.'
                )
        query_embeddings = quantize_embeddings(
            query_embeddings,
            precision=corpus_precision,
            ranges=ranges,
            calibration_embeddings=calibration_embeddings,
        )
    elif rescore:
        logger.warning(
            "Rescoring is enabled but the query embeddings are quantized. Either pass `rescore=False` or "
            "don't quantize the query embeddings."
        )

    # Search the usearch index with the (possibly oversampled) number of candidates
    start_t = time.time()
    matches = corpus_index.search(query_embeddings, count=k, exact=exact)
    scores = matches.distances
    indices = matches.keys
    if scores.ndim < 2:
        scores = np.atleast_2d(scores)
    if indices.ndim < 2:
        indices = np.atleast_2d(indices)

    # Rescore the oversampled candidates against the original float32 queries
    if rescore_embeddings is not None:
        top_k_embeddings = np.array([corpus_index.get(query_indices) for query_indices in indices])
        if corpus_precision in ("ubinary", "binary"):
            top_k_embeddings = np.unpackbits(top_k_embeddings.astype(np.uint8), axis=-1)
        top_k_embeddings = top_k_embeddings.astype(int)

        # rescore_embeddings: [num_queries, embedding_dim], top_k_embeddings: [num_queries, k, embedding_dim]
        rescored_scores = np.einsum("ij,ikj->ik", rescore_embeddings, top_k_embeddings)
        rescored_indices = np.argsort(-rescored_scores)[:, :top_k]
        indices = indices[np.arange(len(query_embeddings))[:, None], rescored_indices]
        scores = rescored_scores[np.arange(len(query_embeddings))[:, None], rescored_indices]

    delta_t = time.time() - start_t

    outputs = (
        [
            [
                {"corpus_id": int(neighbor), "score": float(score)}
                for score, neighbor in zip(scores[query_id], indices[query_id])
            ]
            for query_id in range(len(query_embeddings))
        ],
        delta_t,
    )
    if output_index:
        outputs = (*outputs, corpus_index)

    return outputs


def quantize_embeddings(
    embeddings: Tensor | np.ndarray,
    precision: Literal["float32", "int8", "uint8", "binary", "ubinary"],
    ranges: np.ndarray | None = None,
    calibration_embeddings: np.ndarray | None = None,
) -> np.ndarray:
    """
Quantizes embeddings to a lower precision. This can be used to reduce the memory footprint and increase the
speed of similarity search. The supported precisions are "float32", "int8", "uint8", "binary", and "ubinary".

Args:
    embeddings: Unquantized (e.g. float) embeddings to quantize
        to a given precision
    precision: The precision to convert to. Options are "float32",
        "int8", "uint8", "binary", "ubinary".
    ranges (Optional[np.ndarray]): Ranges for quantization of
        embeddings. This is only used for int8 quantization, where
        the ranges refer to the minimum and maximum values for each
        dimension. So, it's a 2D array with shape (2,
        embedding_dim). Default is None, which means that the ranges
        will be calculated from the calibration embeddings.
    calibration_embeddings (Optional[np.ndarray]): Embeddings used
        for calibration during quantization. This is only used for
        int8 quantization, where the calibration embeddings can be
        used to compute ranges, i.e. the minimum and maximum values
        for each dimension. Default is None, which means that the
        ranges will be calculated from the query embeddings. This is
        not recommended.

Returns:
    Quantized embeddings with the specified precision
    """
    if isinstance(embeddings, Tensor):
        embeddings = embeddings.cpu().numpy()
    elif isinstance(embeddings, list):
        if isinstance(embeddings[0], Tensor):
            embeddings = [embedding.cpu().numpy() for embedding in embeddings]
        embeddings = np.array(embeddings)
    if embeddings.dtype in (np.uint8, np.int8):
        raise Exception("Embeddings to quantize must be float rather than int8 or uint8.")

    if precision == "float32":
        return embeddings.astype(np.float32)

    if precision.endswith("int8"):
        # Compute the per-dimension quantization buckets, preferably from calibration embeddings
        if ranges is None:
            if calibration_embeddings is not None:
                ranges = np.vstack(
                    (np.min(calibration_embeddings, axis=0), np.max(calibration_embeddings, axis=0))
                )
            else:
                if embeddings.shape[0] < 100:
                    logger.warning(
                        f"Computing {precision} quantization buckets based on {len(embeddings)} embedding"
                        f"{'s' if len(embeddings) != 1 else ''}. {precision} quantization is more stable with "
                        "`ranges` calculated from more embeddings or a `calibration_embeddings` that can be "
                        "used to calculate the buckets."
                    )
                ranges = np.vstack((np.min(embeddings, axis=0), np.max(embeddings, axis=0)))
        starts = ranges[0, :]
        steps = (ranges[1, :] - ranges[0, :]) / 255

        if precision == "uint8":
            return ((embeddings - starts) / steps).astype(np.uint8)
        elif precision == "int8":
            return ((embeddings - starts) / steps - 128).astype(np.int8)

    if precision == "binary":
        return (np.packbits(embeddings > 0).reshape(embeddings.shape[0], -1) - 128).astype(np.int8)

    if precision == "ubinary":
        return np.packbits(embeddings > 0).reshape(embeddings.shape[0], -1)

    raise ValueError(f"Precision {precision!r} is not supported")