
import uuid
from typing import Any, Dict, List, Union

from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline


if is_tf_available():
    import tensorflow as tf

if is_torch_available():
    import torch


logger = logging.get_logger(__name__)


class Conversation:
    """
    Utility class containing a conversation and its history. This class is meant to be used as an input to the
    [`ConversationalPipeline`]. The conversation contains several utility functions to manage the addition of new user
    inputs and generated model responses.

    Arguments:
        messages (Union[str, List[Dict[str, str]]], *optional*):
            The initial messages to start the conversation, either a string, or a list of dicts containing "role" and
            "content" keys. If a string is passed, it is interpreted as a single message with the "user" role.
        conversation_id (`uuid.UUID`, *optional*):
            Unique identifier for the conversation. If not provided, a random UUID4 id will be assigned to the
            conversation.

    Usage:

    ```python
    conversation = Conversation("Going to the movies tonight - any suggestions?")
    conversation.add_message({"role": "assistant", "content": "The Big lebowski."})
    conversation.add_message({"role": "user", "content": "Is it good?"})
    ```"""

    def __init__(
        self, messages: Union[str, List[Dict[str, str]]] = None, conversation_id: uuid.UUID = None, **deprecated_kwargs
    ):
        if not conversation_id:
            conversation_id = uuid.uuid4()

        if messages is None:
            text = deprecated_kwargs.pop("text", None)
            if text is not None:
                messages = [{"role": "user", "content": text}]
            else:
                messages = []
        elif isinstance(messages, str):
            messages = [{"role": "user", "content": messages}]

        # This block deals with the legacy arguments - new code should just pass `messages`
        # and avoid `past_user_inputs` and `generated_responses`.
        self._num_processed_user_inputs = 0
        generated_responses = deprecated_kwargs.pop("generated_responses", None)
        past_user_inputs = deprecated_kwargs.pop("past_user_inputs", None)
        if generated_responses is not None and past_user_inputs is None:
            raise ValueError("generated_responses cannot be passed without past_user_inputs!")
        if past_user_inputs is not None:
            legacy_messages = []
            if generated_responses is None:
                generated_responses = []
            # We structure it this way instead of using zip() because the lengths may differ by 1
            for i in range(max([len(past_user_inputs), len(generated_responses)])):
                if i < len(past_user_inputs):
                    legacy_messages.append({"role": "user", "content": past_user_inputs[i]})
                if i < len(generated_responses):
                    legacy_messages.append({"role": "assistant", "content": generated_responses[i]})
            messages = legacy_messages + messages

        self.uuid = conversation_id
        self.messages = messages

    def __eq__(self, other):
        if not isinstance(other, Conversation):
            return False
        return self.uuid == other.uuid or self.messages == other.messages

    def add_message(self, message: Dict[str, str]):
        if not set(message.keys()) == {"role", "content"}:
            raise ValueError("Message should contain only 'role' and 'content' keys!")
        if message["role"] not in ("user", "assistant", "system"):
            raise ValueError("Only 'user', 'assistant' and 'system' roles are supported for now!")
        self.messages.append(message)

    def add_user_input(self, text: str, overwrite: bool = False):
        """
        Add a user input to the conversation for the next round. This is a legacy method that assumes that inputs must
        alternate user/assistant/user/assistant, and so will not add multiple user messages in succession. We recommend
        just using `add_message` with role "user" instead.
        """
        if len(self) > 0 and self[-1]["role"] == "user":
            if overwrite:
                logger.warning(
                    f'User input added while unprocessed input was existing: "{self[-1]["content"]}" was overwritten '
                    f'with: "{text}".'
                )
                self[-1]["content"] = text
            else:
                logger.warning(
                    f'User input added while unprocessed input was existing: "{self[-1]["content"]}" new input '
                    f'ignored: "{text}". Set `overwrite` to True to overwrite unprocessed user input'
                )
        else:
            self.messages.append({"role": "user", "content": text})

    def append_response(self, response: str):
        """
        This is a legacy method. We recommend just using `add_message` with an appropriate role instead.
        """
        self.messages.append({"role": "assistant", "content": response})

    def mark_processed(self):
        """
        This is a legacy method, as the Conversation no longer distinguishes between processed and unprocessed user
        input. We set a counter here to keep behaviour mostly backward-compatible, but in general you should just read
        the messages directly when writing new code.
        """
        self._num_processed_user_inputs = len(self._user_messages)

    def __iter__(self):
        for message in self.messages:
            yield message

    def __getitem__(self, item):
        return self.messages[item]

    def __setitem__(self, key, value):
        self.messages[key] = value

    def __len__(self):
        return len(self.messages)

    def __repr__(self):
        """
        Generates a string representation of the conversation.

        Returns:
            `str`:

        Example:
            Conversation id: 7d15686b-dc94-49f2-9c4b-c9eac6a1f114 user: Going to the movies tonight - any suggestions?
            bot: The Big Lebowski
        """
        output = f"Conversation id: {self.uuid}\n"
        for message in self.messages:
            output += f"{message['role']}: {message['content']}\n"
        return output

    def iter_texts(self):
        # Legacy method kept for backwards compatibility: yields (is_user, text) pairs. New code should just read
        # `conversation.messages` directly.
        for message in self.messages:
            yield message["role"] == "user", message["content"]

    @property
    def _user_messages(self):
        # Legacy helper: the contents of all "user" messages, in order.
        return [message["content"] for message in self.messages if message["role"] == "user"]

    @property
    def past_user_inputs(self):
        # Legacy property. Historically, the most recent user message had to be `mark_processed()` before it was
        # included here; we mimic that single-message buffer for backward compatibility.
        if not self._user_messages:
            return []
        if self.messages[-1]["role"] != "user" or self._num_processed_user_inputs == len(self._user_messages):
            return self._user_messages

        return self._user_messages[:-1]

    @property
    def generated_responses(self):
        # Legacy property: the contents of all "assistant" messages, in order.
        return [message["content"] for message in self.messages if message["role"] == "assistant"]

    @property
    def new_user_input(self):
        # Legacy property: the most recent user message.
        return self._user_messages[-1]


@add_end_docstrings(
    PIPELINE_INIT_ARGS,
    r"""
        min_length_for_response (`int`, *optional*, defaults to 32):
            The minimum length (in number of tokens) for a response.
        minimum_tokens (`int`, *optional*, defaults to 10):
            The minimum length of tokens to leave for a response.
    """,
)
class ConversationalPipeline(Pipeline):
    """
    Multi-turn conversational pipeline.

    Example:

    ```python
    >>> from transformers import pipeline, Conversation

    >>> chatbot = pipeline(model="microsoft/DialoGPT-medium")
    >>> conversation = Conversation("Going to the movies tonight - any suggestions?")
    >>> conversation = chatbot(conversation)
    >>> conversation.generated_responses[-1]
    'The Big Lebowski'

    >>> conversation.add_user_input("Is it an action movie?")
    >>> conversation = chatbot(conversation)
    >>> conversation.generated_responses[-1]
    "It's a comedy."
    ```
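
    The history can also be supplied as plain `role`/`content` dictionaries instead of a [`Conversation`]; the
    pipeline converts them automatically. A minimal sketch (the message text is illustrative):

    ```python
    messages = [{"role": "user", "content": "Recommend a comedy for tonight."}]
    conversation = chatbot(messages)
    ```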

    Learn more about the basics of using a pipeline in the [pipeline tutorial](../pipeline_tutorial)

    This conversational pipeline can currently be loaded from [`pipeline`] using the following task identifier:
    `"conversational"`.

    The models that this pipeline can use are models that have been fine-tuned on a multi-turn conversational task,
    currently: *'microsoft/DialoGPT-small'*, *'microsoft/DialoGPT-medium'*, *'microsoft/DialoGPT-large'*. See the
    up-to-date list of available models on
    [huggingface.co/models](https://huggingface.co/models?filter=conversational).
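
    Internally, the pipeline formats the conversation with the tokenizer's chat template before generating, roughly
    equivalent to the sketch below, so models that ship their own chat template are prompted accordingly:

    ```python
    prompt_ids = tokenizer.apply_chat_template(conversation, add_generation_prompt=True)
    ```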
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.tokenizer.pad_token_id is None:
            self.tokenizer.pad_token = self.tokenizer.eos_token

    def _sanitize_parameters(
        self, min_length_for_response=None, minimum_tokens=None, clean_up_tokenization_spaces=None, **generate_kwargs
    ):
        preprocess_params = {}
        forward_params = {}
        postprocess_params = {}

        if min_length_for_response is not None:
            preprocess_params["min_length_for_response"] = min_length_for_response
        if minimum_tokens is not None:
            forward_params["minimum_tokens"] = minimum_tokens

        if "max_length" in generate_kwargs:
            forward_params["max_length"] = generate_kwargs["max_length"]
        if clean_up_tokenization_spaces is not None:
            postprocess_params["clean_up_tokenization_spaces"] = clean_up_tokenization_spaces

        if generate_kwargs:
            forward_params.update(generate_kwargs)
        return preprocess_params, forward_params, postprocess_params

    def __call__(self, conversations: Union[List[Dict], Conversation, List[Conversation]], num_workers=0, **kwargs):
        r"""
        Generate responses for the conversation(s) given as inputs.

        Args:
            conversations (a [`Conversation`] or a list of [`Conversation`]):
                Conversation to generate responses for. Inputs can also be passed as a list of dictionaries with `role`
                and `content` keys - in this case, they will be converted to `Conversation` objects automatically.
                Multiple conversations in either format may be passed as a list.
            clean_up_tokenization_spaces (`bool`, *optional*, defaults to `False`):
                Whether or not to clean up the potential extra spaces in the text output.
            generate_kwargs:
                Additional keyword arguments to pass along to the generate method of the model (see the generate method
                corresponding to your framework [here](./model#generative-models)).

        Returns:
            [`Conversation`] or a list of [`Conversation`]: Conversation(s) with updated generated responses for those
            containing a new user input.
        """
        # num_workers=0 keeps the pipeline backward compatible: with worker threads, each thread would need its own
        # copy of the Conversation.
        if isinstance(conversations, list) and isinstance(conversations[0], dict):
            conversations = Conversation(conversations)
        elif isinstance(conversations, list) and isinstance(conversations[0], list):
            conversations = [Conversation(conv) for conv in conversations]
        outputs = super().__call__(conversations, num_workers=num_workers, **kwargs)
        if isinstance(outputs, list) and len(outputs) == 1:
            return outputs[0]
        return outputs

    def preprocess(self, conversation: Conversation, min_length_for_response=32) -> Dict[str, Any]:
        input_ids = self.tokenizer.apply_chat_template(conversation, add_generation_prompt=True)

        if self.framework == "pt":
            input_ids = torch.LongTensor([input_ids])
        elif self.framework == "tf":
            input_ids = tf.constant([input_ids])
        return {"input_ids": input_ids, "conversation": conversation}

    def _forward(self, model_inputs, minimum_tokens=10, **generate_kwargs):
        n = model_inputs["input_ids"].shape[1]
        conversation = model_inputs.pop("conversation")
        if "max_length" not in generate_kwargs and "max_new_tokens" not in generate_kwargs:
            generate_kwargs["max_new_tokens"] = 256
        output_ids = self.model.generate(**model_inputs, **generate_kwargs)
        if self.model.config.is_encoder_decoder:
            start_position = 1
        else:
            start_position = n
        return {"output_ids": output_ids[:, start_position:], "conversation": conversation}

    def postprocess(self, model_outputs, clean_up_tokenization_spaces=True):
        output_ids = model_outputs["output_ids"]
        answer = self.tokenizer.decode(
            output_ids[0],
            skip_special_tokens=True,
            clean_up_tokenization_spaces=clean_up_tokenization_spaces,
        )
        conversation = model_outputs["conversation"]
        conversation.add_message({"role": "assistant", "content": answer})
        return conversation