import io
from typing import Any, Dict, List, Optional, Union

from .constants import INFERENCE_ENDPOINT
from .hf_api import HfApi
from .utils import build_hf_headers, get_session, is_pillow_available, logging, validate_hf_hub_args
from .utils._deprecation import _deprecate_method


logger = logging.get_logger(__name__)


ALL_TASKS = [
    # NLP
    "text-classification",
    "token-classification",
    "table-question-answering",
    "question-answering",
    "zero-shot-classification",
    "translation",
    "summarization",
    "conversational",
    "feature-extraction",
    "text-generation",
    "text2text-generation",
    "fill-mask",
    "sentence-similarity",
    # Audio
    "text-to-speech",
    "automatic-speech-recognition",
    "audio-to-audio",
    "audio-classification",
    "voice-activity-detection",
    # Computer vision
    "image-classification",
    "object-detection",
    "image-segmentation",
    "text-to-image",
    "image-to-image",
    # Others
    "tabular-classification",
    "tabular-regression",
]

dZy)InferenceApia  Client to configure requests and make calls to the HuggingFace Inference API.

    Example:

    ```python
    >>> from huggingface_hub.inference_api import InferenceApi

    >>> # Mask-fill example
    >>> inference = InferenceApi("bert-base-uncased")
    >>> inference(inputs="The goal of life is [MASK].")
    [{'sequence': 'the goal of life is life.', 'score': 0.10933292657136917, 'token': 2166, 'token_str': 'life'}]

    >>> # Question Answering example
    >>> inference = InferenceApi("deepset/roberta-base-squad2")
    >>> inputs = {
    ...     "question": "What's my name?",
    ...     "context": "My name is Clara and I live in Berkeley.",
    ... }
    >>> inference(inputs)
    {'score': 0.9326569437980652, 'start': 11, 'end': 16, 'answer': 'Clara'}

    >>> # Zero-shot example
    >>> inference = InferenceApi("typeform/distilbert-base-uncased-mnli")
    >>> inputs = "Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!"
    >>> params = {"candidate_labels": ["refund", "legal", "faq"]}
    >>> inference(inputs, params)
    {'sequence': 'Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!', 'labels': ['refund', 'faq', 'legal'], 'scores': [0.9378499388694763, 0.04914155602455139, 0.013008488342165947]}

    >>> # Overriding configured task
    >>> inference = InferenceApi("bert-base-uncased", task="feature-extraction")

    >>> # Text-to-image
    >>> inference = InferenceApi("stabilityai/stable-diffusion-2-1")
    >>> inference("cat")
    <PIL.PngImagePlugin.PngImageFile image (...)>

    >>> # Return as raw response to parse the output yourself
    >>> inference = InferenceApi("mio/amadeus")
    >>> response = inference("hello world", raw_response=True)
    >>> response.headers
    {"Content-Type": "audio/flac", ...}
    >>> response.content # raw bytes from server
    b'(...)'
    ```
    """

    @validate_hf_hub_args
    @_deprecate_method(
        version="0.19.0",
        message=(
            "`InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out"
            " this guide to learn how to convert your script to use it:"
            " https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client."
        ),
    )
    def __init__(
        self,
        repo_id: str,
        task: Optional[str] = None,
        token: Optional[str] = None,
        gpu: bool = False,
    ):
        """Inits headers and API call information.

        Args:
            repo_id (``str``):
                Id of repository (e.g. `user/bert-base-uncased`).
            task (``str``, `optional`, defaults ``None``):
                Whether to force a task instead of using task specified in the
                repository.
            token (`str`, `optional`):
                The API token to use as HTTP bearer authorization. This is not
                the authentication token. You can find the token in
                https://huggingface.co/settings/token. Alternatively, you can
                find both your organizations and personal API tokens using
                `HfApi().whoami(token)`.
            gpu (`bool`, `optional`, defaults `False`):
                Whether to use GPU instead of CPU for inference(requires Startup
                plan at least).
        """
        self.options = {"wait_for_model": True, "use_gpu": gpu}
        self.headers = build_hf_headers(token=token)

        # Configure task
        model_info = HfApi(token=token).model_info(repo_id=repo_id)
        if not model_info.pipeline_tag and not task:
            raise ValueError(
                "Task not specified in the repository. Please add it to the model card using pipeline_tag"
                " (https://huggingface.co/docs#how-is-a-models-type-of-inference-api-and-widget-determined)"
            )

        if task and task != model_info.pipeline_tag:
            if task not in ALL_TASKS:
                raise ValueError(f"Invalid task {task}. Make sure it's valid.")
            logger.warning(
                "You're using a different task than the one specified in the repository. Be sure to know what you're"
                " doing :)"
            )
            self.task = task
        else:
            assert model_info.pipeline_tag is not None, "Pipeline tag cannot be None"
            self.task = model_info.pipeline_tag

        self.api_url = f"{INFERENCE_ENDPOINT}/pipeline/{self.task}/{repo_id}"

    def __repr__(self):
        # Do not include headers in the repr to avoid leaking the user token.
        return f"InferenceAPI(api_url='{self.api_url}', task='{self.task}', options={self.options})"

    def __call__(
        self,
        inputs: Optional[Union[str, Dict, List[str], List[List[str]]]] = None,
        params: Optional[Dict] = None,
        data: Optional[bytes] = None,
        raw_response: bool = False,
    ) -> Any:
        """Make a call to the Inference API.

        Args:
            inputs (`str` or `Dict` or `List[str]` or `List[List[str]]`, *optional*):
                Inputs for the prediction.
            params (`Dict`, *optional*):
                Additional parameters for the models. Will be sent as `parameters` in the
                payload.
            data (`bytes`, *optional*):
                Bytes content of the request. In this case, leave `inputs` and `params` empty.
            raw_response (`bool`, defaults to `False`):
                If `True`, the raw `Response` object is returned. You can parse its content
                as preferred. By default, the content is parsed into a more practical format
                (json dictionary or PIL Image for example).
        """
        # Build payload
        payload: Dict[str, Any] = {
            "options": self.options,
        }
        if inputs:
            payload["inputs"] = inputs
        if params:
            payload["parameters"] = params

        # Make API call
        response = get_session().post(self.api_url, headers=self.headers, json=payload, data=data)

        # Let the user handle the response
        if raw_response:
            return response

        # By default, parse the response for the user.
        content_type = response.headers.get("Content-Type") or ""
        if content_type.startswith("image"):
            if not is_pillow_available():
                raise ImportError(
                    f"Task '{self.task}' returned as image but Pillow is not installed."
                    " Please install it (`pip install Pillow`) or pass `raw_response=True` to get the raw `Response`"
                    " object and parse the image by yourself."
                )

            from PIL import Image

            return Image.open(io.BytesIO(response.content))
        elif content_type == "application/json":
            return response.json()
        else:
            raise NotImplementedError(
                f"{content_type} output type is not implemented yet. You can pass `raw_response=True` to get the raw"
                " `Response` object and parse the output by yourself."
            )
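

# ---------------------------------------------------------------------------
# Usage sketch (illustration only, not part of the library): a minimal, hedged
# example of calling the deprecated client defined above. It assumes a public
# fill-mask model and a token exposed through the hypothetical `HF_TOKEN`
# environment variable; adapt both to your setup. For new code, prefer
# `huggingface_hub.InferenceClient`, as the deprecation message above says.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import os

    # Instantiate the client for a fill-mask model; the task is read from the repo card.
    inference = InferenceApi("bert-base-uncased", token=os.getenv("HF_TOKEN"))

    # For "application/json" responses the call returns parsed JSON; pass
    # `raw_response=True` to get the raw `Response` object instead.
    print(inference(inputs="The goal of life is [MASK]."))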