from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env

from langchain_community.llms.utils import enforce_stop_tokens


class ForefrontAI(LLM):
    """ForefrontAI large language models.

    To use, you should have the environment variable ``FOREFRONTAI_API_KEY``
    set with your API key.

    Example:
        .. code-block:: python

            from langchain_community.llms import ForefrontAI
            forefrontai = ForefrontAI(endpoint_url="")
    """

    endpoint_url: str = ""
    temperature: float = 0.7
    length: int = 256
    top_p: float = 1.0
    top_k: int = 40
    repetition_penalty: int = 1
    forefrontai_api_key: SecretStr
    base_url: Optional[str] = None

    class Config:
        extra = "forbid"

    @root_validator(pre=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key exists in environment."""
        values["forefrontai_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "forefrontai_api_key", "FOREFRONTAI_API_KEY")
        )
        return values

    @property
    def _default_params(self) -> Mapping[str, Any]:
        """Get the default parameters for calling ForefrontAI API."""
        return {
            "temperature": self.temperature,
            "length": self.length,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "repetition_penalty": self.repetition_penalty,
        }

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{"endpoint_url": self.endpoint_url}, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "forefrontai"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to ForefrontAI's complete endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = ForefrontAI("Tell me a joke.")
        """
        auth_value = f"Bearer {self.forefrontai_api_key.get_secret_value()}"
        response = requests.post(
            url=self.endpoint_url,
            headers={
                "Authorization": auth_value,
                "Content-Type": "application/json",
            },
            json={"text": prompt, **self._default_params, **kwargs},
        )
        response_json = response.json()
        text = response_json["result"][0]["completion"]
        if stop is not None:
            # Stop tokens are not enforced by the endpoint itself,
            # so trim the completion client-side.
            text = enforce_stop_tokens(text, stop)
        return text