U
    h                     @   s   d dl mZmZmZmZmZmZmZ d dlm	Z	 d dl
mZmZ d dlmZmZmZ d dlmZmZmZmZmZmZ d dlmZmZmZ d dlmZ d dlmZ ee e e d	d
dZ!dddee e e e dddZ"e	ddddG dd deeZ#dS )    )AnyAsyncIteratorDictIteratorListOptionalcast)
deprecated)AsyncCallbackManagerForLLMRunCallbackManagerForLLMRun)BaseChatModelagenerate_from_streamgenerate_from_stream)	AIMessageAIMessageChunkBaseMessageChatMessageHumanMessageSystemMessage)ChatGenerationChatGenerationChunk
ChatResult)PromptValue)_AnthropicCommon)messagehuman_prompt	ai_promptreturnc                 C   s   t t| j}t| tr.d| j  d| }nRt| trH| d| }n8t| trb| d| }nt| t	rr|}nt
d|  |S )Nz

z:  zGot unknown type )r   strcontent
isinstancer   role
capitalizer   r   r   
ValueError)r   r   r   r    Zmessage_text r%   M/tmp/pip-unpacked-wheel-9gdii04g/langchain_community/chat_models/anthropic.py_convert_one_message_to_text   s    



r'   z

Human:z

Assistant:)r   r   )messagesr   r   r   c                   sH   |   } t| d ts&| tdd d fdd| D }| S )a  Format a list of messages into a full prompt for the Anthropic model
    Args:
        messages (List[BaseMessage]): List of BaseMessage to combine.
        human_prompt (str, optional): Human prompt tag. Defaults to "

Human:".
        ai_prompt (str, optional): AI prompt tag. Defaults to "

Assistant:".
    Returns:
        str: Combined string with necessary human_prompt and ai_prompt tags.
     r    c                 3   s   | ]}t | V  qd S N)r'   ).0r   r   r   r%   r&   	<genexpr>A   s   z7convert_messages_to_prompt_anthropic.<locals>.<genexpr>)copyr!   r   appendjoinrstrip)r(   r   r   textr%   r.   r&   $convert_messages_to_prompt_anthropic.   s    r5   z0.0.28z1.0z!langchain_anthropic.ChatAnthropic)ZsinceZremovalZalternative_importc                   @   sN  e Zd ZdZG dd dZeeeef dddZeedddZ	e
edd	d
Ze
ee dddZee edddZeedddZd ee eee  ee eee dddZd!ee eee  ee eee dddZd"ee eee  ee eedddZd#ee eee  ee eedddZeedddZ dS )$ChatAnthropica  `Anthropic` chat large language models.

    To use, you should have the ``anthropic`` python package installed, and the
    environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            import anthropic
            from langchain_community.chat_models import ChatAnthropic
            model = ChatAnthropic(model="<model_name>", anthropic_api_key="my-api-key")
    c                   @   s   e Zd ZdZdZdS )zChatAnthropic.ConfigTN)__name__
__module____qualname__Zallow_population_by_field_nameZarbitrary_types_allowedr%   r%   r%   r&   Config^   s   r:   )r   c                 C   s   ddiS )NZanthropic_api_keyZANTHROPIC_API_KEYr%   selfr%   r%   r&   
lc_secretsb   s    zChatAnthropic.lc_secretsc                 C   s   dS )zReturn type of chat model.zanthropic-chatr%   r;   r%   r%   r&   	_llm_typef   s    zChatAnthropic._llm_typec                 C   s   dS )z9Return whether this model can be serialized by Langchain.Tr%   clsr%   r%   r&   is_lc_serializablek   s    z ChatAnthropic.is_lc_serializablec                 C   s
   dddgS )z*Get the namespace of the langchain object.Z	langchainZchat_modelsZ	anthropicr%   r?   r%   r%   r&   get_lc_namespacep   s    zChatAnthropic.get_lc_namespace)r(   r   c                 C   s6   i }| j r| j |d< | jr$| j|d< tf d|i|S )a  Format a list of messages into a full prompt for the Anthropic model
        Args:
            messages (List[BaseMessage]): List of BaseMessage to combine.
        Returns:
            str: Combined string with necessary HUMAN_PROMPT and AI_PROMPT tags.
        r   r   r(   )ZHUMAN_PROMPTZ	AI_PROMPTr5   )r<   r(   Zprompt_paramsr%   r%   r&   _convert_messages_to_promptu   s    

z)ChatAnthropic._convert_messages_to_prompt)promptr   c                 C   s   |  | S r,   )rC   Zto_messages)r<   rD   r%   r%   r&   convert_prompt   s    zChatAnthropic.convert_promptN)r(   stoprun_managerkwargsr   c                 k   sz   |  |}d|i| j|}|r&||d< | jjjf |ddi}|D ]2}|j}	tt|	dd}
|rn|j|	|
d |
V  qBd S NrD   stop_sequencesstreamTr+   r   )chunk)	rC   _default_paramsclientcompletionscreate
completionr   r   on_llm_new_tokenr<   r(   rF   rG   rH   rD   paramsZstream_respdatadeltarM   r%   r%   r&   _stream   s    
zChatAnthropic._streamc                 K  s   |  |}d|i| j|}|r&||d< | jjjf |ddiI d H }|2 z@3 d H W }|j}	tt|	dd}
|r|j|	|
dI d H  |
V  qH6 d S rI   )	rC   rN   async_clientrP   rQ   rR   r   r   rS   rT   r%   r%   r&   _astream   s    
zChatAnthropic._astreamc                 K   s~   | j r&| j|f||d|}t|S | |}d|i| j|}|rL||d< | jjjf |}|j}	t	|	d}
t
t|
dgdS N)rF   rG   rD   rJ   r+   rL   )Zgenerations)	streamingrX   r   rC   rN   rO   rP   rQ   rR   r   r   r   r<   r(   rF   rG   rH   Zstream_iterrD   rU   responserR   r   r%   r%   r&   	_generate   s2      
zChatAnthropic._generatec                    s   | j r,| j|f||d|}t|I d H S | |}d|i| j|}|rR||d< | jjjf |I d H }|j}	t	|	d}
t
t|
dgdS r[   )r\   rZ   r   rC   rN   rY   rP   rQ   rR   r   r   r   r]   r%   r%   r&   
_agenerate   s2      
zChatAnthropic._agenerate)r4   r   c                 C   s   | j std|  |S )zCalculate number of tokens.z-Please ensure the anthropic package is loaded)Zcount_tokens	NameError)r<   r4   r%   r%   r&   get_num_tokens   s    zChatAnthropic.get_num_tokens)NN)NN)NN)NN)!r7   r8   r9   __doc__r:   propertyr   r   r=   r>   classmethodboolrA   r   rB   r   rC   r   rE   r   r   r   r   r   rX   r
   r   rZ   r   r_   r`   intrb   r%   r%   r%   r&   r6   J   sb     
  
  
  
r6   N)$typingr   r   r   r   r   r   r   Zlangchain_core._api.deprecationr	   Zlangchain_core.callbacksr
   r   Z*langchain_core.language_models.chat_modelsr   r   r   Zlangchain_core.messagesr   r   r   r   r   r   Zlangchain_core.outputsr   r   r   Zlangchain_core.prompt_valuesr   Z"langchain_community.llms.anthropicr   r   r'   r5   r6   r%   r%   r%   r&   <module>   s4   $ 