import re
import warnings
from typing import (
    Any,
    AsyncIterator,
    Callable,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
)

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.prompt_values import PromptValue
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
from langchain_core.utils import (
    check_package_version,
    get_from_dict_or_env,
    get_pydantic_field_names,
    pre_init,
)
from langchain_core.utils.utils import build_extra_kwargs, convert_to_secret_str


class _AnthropicCommon(BaseLanguageModel):
    client: Any = None  #: :meta private:
    async_client: Any = None  #: :meta private:
    model: str = Field(default="claude-2", alias="model_name")
    max_tokens_to_sample: int = Field(default=256, alias="max_tokens")
    temperature: Optional[float] = None
    top_k: Optional[int] = None
    top_p: Optional[float] = None
    streaming: bool = False
    default_request_timeout: Optional[float] = None
    max_retries: int = 2
    anthropic_api_url: Optional[str] = None
    anthropic_api_key: Optional[SecretStr] = None
    HUMAN_PROMPT: Optional[str] = None
    AI_PROMPT: Optional[str] = None
    count_tokens: Optional[Callable[[str], int]] = None
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)

    @root_validator(pre=True)
    def build_extra(cls, values: Dict) -> Dict:
        extra = values.get("model_kwargs", {})
        all_required_field_names = get_pydantic_field_names(cls)
        values["model_kwargs"] = build_extra_kwargs(
            extra, values, all_required_field_names
        )
        return values

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the API key and Python package exist in the environment."""
        values["anthropic_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "anthropic_api_key", "ANTHROPIC_API_KEY")
        )
        values["anthropic_api_url"] = get_from_dict_or_env(
            values,
            "anthropic_api_url",
            "ANTHROPIC_API_URL",
            default="https://api.anthropic.com",
        )

        try:
            import anthropic

            check_package_version("anthropic", gte_version="0.3")
            values["client"] = anthropic.Anthropic(
                base_url=values["anthropic_api_url"],
                api_key=values["anthropic_api_key"].get_secret_value(),
                timeout=values["default_request_timeout"],
                max_retries=values["max_retries"],
            )
            values["async_client"] = anthropic.AsyncAnthropic(
                base_url=values["anthropic_api_url"],
                api_key=values["anthropic_api_key"].get_secret_value(),
                timeout=values["default_request_timeout"],
                max_retries=values["max_retries"],
            )
            values["HUMAN_PROMPT"] = anthropic.HUMAN_PROMPT
            values["AI_PROMPT"] = anthropic.AI_PROMPT
            values["count_tokens"] = values["client"].count_tokens
        except ImportError:
            raise ImportError(
                "Could not import anthropic python package. "
                "Please install it with `pip install anthropic`."
            )
        return values

    @property
    def _default_params(self) -> Mapping[str, Any]:
        """Get the default parameters for calling the Anthropic API."""
        d = {
            "max_tokens_to_sample": self.max_tokens_to_sample,
            "model": self.model,
        }
        if self.temperature is not None:
            d["temperature"] = self.temperature
        if self.top_k is not None:
            d["top_k"] = self.top_k
        if self.top_p is not None:
            d["top_p"] = self.top_p
        return {**d, **self.model_kwargs}

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**self._default_params}

    def _get_anthropic_stop(self, stop: Optional[List[str]] = None) -> List[str]:
        if not self.HUMAN_PROMPT or not self.AI_PROMPT:
            raise NameError("Please ensure the anthropic package is loaded")

        if stop is None:
            stop = []

        # Never want the model to invent new turns of Human / Assistant dialog.
        stop.extend([self.HUMAN_PROMPT])

        return stop


@deprecated(
    since="0.0.28",
    removal="1.0",
    alternative_import="langchain_anthropic.AnthropicLLM",
)
class Anthropic(LLM, _AnthropicCommon):
    """Anthropic large language models.

    To use, you should have the ``anthropic`` python package installed, and the
    environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            import anthropic
            from langchain_community.llms import Anthropic

            model = Anthropic(model="<model_name>", anthropic_api_key="my-api-key")

            # Simplest invocation, automatically wrapped with HUMAN_PROMPT
            # and AI_PROMPT.
            response = model.invoke("What are the biggest risks facing humanity?")

            # Or if you want to use the chat mode, build a few-shot-prompt, or
            # put words in the Assistant's mouth, use HUMAN_PROMPT and AI_PROMPT:
            raw_prompt = "What are the biggest risks facing humanity?"
            prompt = f"{anthropic.HUMAN_PROMPT} {raw_prompt}{anthropic.AI_PROMPT}"
            response = model.invoke(prompt)
    """

    class Config:
        allow_population_by_field_name = True
        arbitrary_types_allowed = True

    @pre_init
    def raise_warning(cls, values: Dict) -> Dict:
        """Raise warning that this class is deprecated."""
        warnings.warn(
            "This Anthropic LLM is deprecated. Please use "
            "`from langchain_community.chat_models import ChatAnthropic` instead"
        )
        return values

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "anthropic-llm"

    def _wrap_prompt(self, prompt: str) -> str:
        if not self.HUMAN_PROMPT or not self.AI_PROMPT:
            raise NameError("Please ensure the anthropic package is loaded")

        if prompt.startswith(self.HUMAN_PROMPT):
            return prompt  # Already wrapped.

        # Guard against common errors in specifying the wrong number of newlines.
        corrected_prompt, n_subs = re.subn(r"^\n*Human:", self.HUMAN_PROMPT, prompt)
        if n_subs == 1:
            return corrected_prompt

        # As a last resort, wrap the prompt ourselves to emulate instruct-style.
        return f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT} Sure, here you go:\n"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        r"""Call out to Anthropic's completion endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                prompt = "What are the biggest risks facing humanity?"
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                response = model.invoke(prompt)

        """
        if self.streaming:
            completion = ""
            for chunk in self._stream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                completion += chunk.text
            return completion

        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}
        response = self.client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            **params,
        )
        return response.completion

    def convert_prompt(self, prompt: PromptValue) -> str:
        return self._wrap_prompt(prompt.to_string())

    async def _acall(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to Anthropic's completion endpoint asynchronously."""
        if self.streaming:
            completion = ""
            async for chunk in self._astream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                completion += chunk.text
            return completion

        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}
        response = await self.async_client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            **params,
        )
        return response.completion

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        r"""Call Anthropic completion_stream and return the resulting generator.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
        Returns:
            A generator representing the stream of tokens from Anthropic.
        Example:
            .. code-block:: python

                prompt = "Write a poem about a stream."
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                generator = anthropic.stream(prompt)
                for token in generator:
                    yield token
        """
        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        for token in self.client.completions.create(
            prompt=self._wrap_prompt(prompt), stop_sequences=stop, stream=True, **params
        ):
            chunk = GenerationChunk(text=token.completion)

            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    async def _astream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[GenerationChunk]:
        r"""Call Anthropic completion_stream and return the resulting generator.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
        Returns:
            A generator representing the stream of tokens from Anthropic.
        Example:
            .. code-block:: python
                prompt = "Write a poem about a stream."
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                generator = anthropic.stream(prompt)
                for token in generator:
                    yield token
        """
        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        async for token in await self.async_client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            stream=True,
            **params,
        ):
            chunk = GenerationChunk(text=token.completion)

            if run_manager:
                await run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    def get_num_tokens(self, text: str) -> int:
        """Calculate number of tokens."""
        if not self.count_tokens:
            raise NameError("Please ensure the anthropic package is loaded")
        return self.count_tokens(text)