import logging
from typing import Any, Dict, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import Generation, LLMResult
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)

EXAMPLE_URL = "https://clarifai.com/openai/chat-completion/models/GPT-4"


class Clarifai(LLM):
    """Clarifai large language models.

    To use, you should have an account on the Clarifai platform,
    the ``clarifai`` python package installed, and the
    environment variable ``CLARIFAI_PAT`` set with your PAT key,
    or pass it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms import Clarifai
            clarifai_llm = Clarifai(user_id=USER_ID, app_id=APP_ID, model_id=MODEL_ID)
                             (or)
            clarifai_llm = Clarifai(model_url=EXAMPLE_URL)
    """

    model_url: Optional[str] = None
    model_id: Optional[str] = None
    model_version_id: Optional[str] = None
    app_id: Optional[str] = None
    user_id: Optional[str] = None
    pat: Optional[str] = Field(default=None, exclude=True)
    token: Optional[str] = Field(default=None, exclude=True)
    model: Any = Field(default=None, exclude=True)
    api_base: str = "https://api.clarifai.com"

    class Config:
        extra = "forbid"

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that we have all required info to access the Clarifai
        platform and that the clarifai python package exists in the environment."""
        try:
            from clarifai.client.model import Model
        except ImportError:
            raise ImportError(
                "Could not import clarifai python package. "
                "Please install it with `pip install clarifai`."
            )
        user_id = values.get("user_id")
        app_id = values.get("app_id")
        model_id = values.get("model_id")
        model_version_id = values.get("model_version_id")
        model_url = values.get("model_url")
        api_base = values.get("api_base")
        pat = values.get("pat")
        token = values.get("token")

        values["model"] = Model(
            url=model_url,
            app_id=app_id,
            user_id=user_id,
            model_version=dict(id=model_version_id),
            pat=pat,
            token=token,
            model_id=model_id,
            base_url=api_base,
        )
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling the Clarifai API."""
        return {}

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters."""
        return {
            "user_id": self.user_id,
            "app_id": self.app_id,
            "model_url": self.model_url,
            "model_version_id": self.model_version_id,
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "clarifai"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        inference_params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to Clarifai's PostModelOutputs endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = clarifai_llm.invoke("Tell me a joke.")
        """
        try:
            if inference_params is None:
                inference_params = {}
            predict_response = self.model.predict_by_bytes(
                bytes(prompt, "utf-8"),
                input_type="text",
                inference_params=inference_params,
            )
            text = predict_response.outputs[0].data.text.raw
            if stop is not None:
                text = enforce_stop_tokens(text, stop)
        except Exception as e:
            logger.error(f"Predict failed, exception: {e}")
        return text

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        inference_params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Run the LLM on the given prompts and input."""
        try:
            from clarifai.client.input import Inputs
        except ImportError:
            raise ImportError(
                "Could not import clarifai python package. "
                "Please install it with `pip install clarifai`."
            )

        generations = []
        batch_size = 32
        input_obj = Inputs.from_auth_helper(self.model.auth_helper)
        try:
            # Send the prompts to the model in batches of `batch_size` text inputs.
            for i in range(0, len(prompts), batch_size):
                batch = prompts[i : i + batch_size]
                input_batch = [
                    input_obj.get_text_input(input_id=str(id), raw_text=inp)
                    for id, inp in enumerate(batch)
                ]
                if inference_params is None:
                    inference_params = {}
                predict_response = self.model.predict(
                    inputs=input_batch,
                    inference_params=inference_params,
                )

            for output in predict_response.outputs:
                if stop is not None:
                    text = enforce_stop_tokens(output.data.text.raw, stop)
                else:
                    text = output.data.text.raw
                generations.append([Generation(text=text)])

        except Exception as e:
            logger.error(f"Predict failed, exception: {e}")

        return LLMResult(generations=generations)
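
if __name__ == "__main__":
    # Illustrative usage sketch, not part of the upstream module. It assumes
    # the ``clarifai`` package is installed and that a valid personal access
    # token is available (via the CLARIFAI_PAT environment variable or the
    # ``pat`` parameter); the model URL below is this module's EXAMPLE_URL.
    clarifai_llm = Clarifai(model_url=EXAMPLE_URL)

    # A single prompt is routed through _call and returns a plain string.
    print(clarifai_llm.invoke("Tell me a joke."))

    # Multiple prompts are routed through _generate, which batches up to 32
    # text inputs per Clarifai predict request and returns an LLMResult.
    result = clarifai_llm.generate(
        ["Write a haiku about the sea.", "Explain what Clarifai does."]
    )
    for generation in result.generations:
        print(generation[0].text)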