import logging
from typing import Any, Dict, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage
from langchain_core.utils import get_from_dict_or_env, pre_init

logger = logging.getLogger(__name__)


class OpaquePrompts(LLM):
    """LLM that uses OpaquePrompts to sanitize prompts.

    Wraps another LLM and sanitizes prompts before passing it to the LLM, then
        de-sanitizes the response.

    To use, you should have the ``opaqueprompts`` python package installed,
    and the environment variable ``OPAQUEPROMPTS_API_KEY`` set with
    your API key, or pass it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms import OpaquePrompts
            from langchain_community.chat_models import ChatOpenAI

            op_llm = OpaquePrompts(base_llm=ChatOpenAI())
    """

    base_llm: BaseLanguageModel
    """The base LLM to use."""

    class Config:
        extra = "forbid"

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validates that the OpaquePrompts API key and the Python package exist."""
        try:
            import opaqueprompts as op
        except ImportError:
            raise ImportError(
                "Could not import the `opaqueprompts` Python package, "
                "please install it with `pip install opaqueprompts`."
            )

        if op.__package__ is None:
            raise ValueError(
                "Could not properly import `opaqueprompts`, "
                "opaqueprompts.__package__ is None."
            )

        api_key = get_from_dict_or_env(
            values, "opaqueprompts_api_key", "OPAQUEPROMPTS_API_KEY", default=""
        )
        if not api_key:
            raise ValueError(
                "Could not find OPAQUEPROMPTS_API_KEY in the environment. "
                "Please set it to your OpaquePrompts API key. "
                "You can get it by creating an account on the OpaquePrompts "
                "website: https://opaqueprompts.opaque.co/ ."
            )
        return values

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call base LLM with sanitization before and de-sanitization after.

        Args:
            prompt: The prompt to pass into the model.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = op_llm.invoke("Tell me a joke.")
        r   N)r!   )secure_context)r   r   Zget_noop_managersanitizeZsanitized_textsr   bindZinvoke
isinstancer	   contentZ
desanitizer$   Zdesanitized_text)selfr    r!   r"   r#   r   Z_run_managerZsanitize_responseZsanitized_prompt_value_strZllm_responseZdesanitize_responser   r   r   _callB   s    

zOpaquePrompts._call)r   c                 C   s   dS )zSReturn type of LLM.

        This is an override of the base class method.
        r   r   )r)   r   r   r   	_llm_typem   s    zOpaquePrompts._llm_type)NN)r   r   r   __doc__r   __annotations__r   r   r   r   strr   r   r   r   r*   propertyr+   r   r   r   r   r      s    
  
+r   )loggingtypingr   r   r   r   Zlangchain_core.callbacksr   Zlangchain_core.language_modelsr   Z#langchain_core.language_models.llmsr   Zlangchain_core.messagesr	   Zlangchain_core.utilsr
   r   	getLoggerr   loggerr   r   r   r   r   <module>   s   

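

# --- Usage sketch (illustrative, not part of the module above) ---
# A minimal end-to-end example of driving this wrapper, assuming the
# OPAQUEPROMPTS_API_KEY environment variable is set and that ChatOpenAI
# is available as the base model; the prompt text is a made-up example.
#
#     from langchain_community.chat_models import ChatOpenAI
#     from langchain_community.llms import OpaquePrompts
#
#     op_llm = OpaquePrompts(base_llm=ChatOpenAI())
#     # Sensitive data in the prompt is sanitized before it reaches the
#     # base LLM, and the original values are restored in the returned string.
#     print(op_llm.invoke("Draft a note to Jane Doe at jane@example.com."))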