"""EverlyAI Endpoints chat wrapper. Relies heavily on ChatOpenAI."""
from __future__ import annotations

import logging
import sys
from typing import TYPE_CHECKING, Dict, Optional, Set

from langchain_core.messages import BaseMessage
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env

from langchain_community.adapters.openai import convert_message_to_dict
from langchain_community.chat_models.openai import ChatOpenAI, _import_tiktoken

if TYPE_CHECKING:
    import tiktoken

logger = logging.getLogger(__name__)

DEFAULT_API_BASE = "https://everlyai.xyz/hosted"
DEFAULT_MODEL = "meta-llama/Llama-2-7b-chat-hf"


class ChatEverlyAI(ChatOpenAI):
    """`EverlyAI` Chat large language models.

    To use, you should have the ``openai`` python package installed, and the
    environment variable ``EVERLYAI_API_KEY`` set with your API key.
    Alternatively, you can use the everlyai_api_key keyword argument.

    Any parameters that are valid to be passed to the `openai.create` call can be passed
    in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatEverlyAI
            chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")
    str)returnc                 C  s   dS )zReturn type of chat model.zeverlyai-chat selfr   r   L/tmp/pip-unpacked-wheel-9gdii04g/langchain_community/chat_models/everlyai.py	_llm_type.   s    zChatEverlyAI._llm_typezDict[str, str]c                 C  s   ddiS )Neverlyai_api_keyEVERLYAI_API_KEYr   r   r   r   r   
lc_secrets3   s    zChatEverlyAI.lc_secretsboolc                 C  s   dS )NFr   )clsr   r   r   is_lc_serializable7   s    zChatEverlyAI.is_lc_serializableNzOptional[str]r   model)defaultalias
model_nameeverlyai_api_basezOptional[Set[str]]available_modelszSet[str]c                   C  s   t ddgS )z'Get available models from EverlyAI API.r   z(meta-llama/Llama-2-13b-chat-hf-quantized)setr   r   r   r   get_available_modelsD   s
    z!ChatEverlyAI.get_available_modelsT)predict)valuesr   c              
   C  s   t t|dd|d< t|d< zddl}W n, tk
rT } ztd|W 5 d}~X Y nX z|j|d< W n, tk
r } ztd	|W 5 d}~X Y nX d
| krt	|d
< |d
 }| 
 }||krtd| d| d||d< |S )z?Validate that api key and python package exists in environment.r   r   Zopenai_api_keyZopenai_api_baser   NzTCould not import openai python package. Please install it with `pip install openai`.clientz`openai` has no `ChatCompletion` attribute, this is likely due to an old version of the openai package. Try upgrading it with `pip install --upgrade openai`.r"   zModel name z  not found in available models: .r$   )r
   r   DEFAULT_API_BASEopenaiImportErrorZChatCompletionAttributeError
ValueErrorkeysDEFAULT_MODELr&   )r   r)   r-   eexcr"   r$   r   r   r   validate_environment_overrideO   sD    z*ChatEverlyAI.validate_environment_overrideztuple[str, tiktoken.Encoding]c                 C  sb   t  }| jd k	r| j}n| j}z|d}W n, tk
rX   td d}||}Y nX ||fS )Nzgpt-3.5-turbo-0301z5Warning: model not found. Using cl100k_base encoding.Zcl100k_base)r   Ztiktoken_model_namer"   Zencoding_for_modelKeyErrorloggerwarningZget_encoding)r   Z	tiktoken_r   encodingr   r   r   _get_encoding_model}   s    

    def get_num_tokens_from_messages(self, messages: list[BaseMessage]) -> int:
        """Calculate num tokens with tiktoken package.

        Official documentation: https://github.com/openai/openai-cookbook/blob/
        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
        if sys.version_info[1] <= 7:
            return super().get_num_tokens_from_messages(messages)
        _, encoding = self._get_encoding_model()
        tokens_per_message = 3
        tokens_per_name = 1
        num_tokens = 0
        messages_dict = [convert_message_to_dict(m) for m in messages]
        for message in messages_dict:
            num_tokens += tokens_per_message
            for key, value in message.items():
                # Cast to str(value) in case the message value is not a string
                # (this occurs with function messages).
                num_tokens += len(encoding.encode(str(value)))
                if key == "name":
                    num_tokens += tokens_per_name
        # Every reply is primed with <im_start>assistant.
        num_tokens += 3
        return num_tokens
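

# Illustrative usage sketch, not a definitive recipe: it assumes the ``openai``
# and ``tiktoken`` packages are installed and that EVERLYAI_API_KEY is set in
# the environment (or passed via the everlyai_api_key kwarg). The network call
# is left commented out so the sketch stays side-effect free.
if __name__ == "__main__":
    from langchain_core.messages import HumanMessage

    chat = ChatEverlyAI(model_name=DEFAULT_MODEL, temperature=0)
    msgs = [HumanMessage(content="Say hello from EverlyAI.")]
    # Token accounting uses the tiktoken-based override defined above.
    print("prompt tokens:", chat.get_num_tokens_from_messages(msgs))
    # response = chat.invoke(msgs)  # requires a valid EverlyAI API key
    # print(response.content)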