from __future__ import annotations

from typing import Any, Dict, Iterator, List, Optional

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import BaseModel, SecretStr
from langchain_core.utils import get_from_dict_or_env, pre_init

from langchain_community.llms import BaseLLM
from langchain_community.utilities.vertexai import create_retry_decorator


def completion_with_retry(
    llm: GooglePalm,
    prompt: LanguageModelInput,
    is_gemini: bool = False,
    stream: bool = False,
    run_manager: Optional[CallbackManagerForLLMRun] = None,
    **kwargs: Any,
) -> Any:
    """Use tenacity to retry the completion call."""
    retry_decorator = create_retry_decorator(
        llm, max_retries=llm.max_retries, run_manager=run_manager
    )

    @retry_decorator
    def _completion_with_retry(
        prompt: LanguageModelInput, is_gemini: bool, stream: bool, **kwargs: Any
    ) -> Any:
        generation_config = kwargs.get("generation_config", {})
        if is_gemini:
            return llm.client.generate_content(
                contents=prompt, stream=stream, generation_config=generation_config
            )
        return llm.client.generate_text(prompt=prompt, **kwargs)

    return _completion_with_retry(
        prompt=prompt, is_gemini=is_gemini, stream=stream, **kwargs
    )


def _is_gemini_model(model_name: str) -> bool:
    return "gemini" in model_name


def _strip_erroneous_leading_spaces(text: str) -> str:
    """Strip erroneous leading spaces from text.

    The PaLM API will sometimes erroneously return a single leading space in all
    lines > 1. This function strips that space.
    """
    has_leading_space = all(
        not line or line[0] == " " for line in text.split("\n")[1:]
    )
    if has_leading_space:
        return text.replace("\n ", "\n")
    return text


@deprecated("0.0.12", alternative_import="langchain_google_genai.GoogleGenerativeAI")
class GooglePalm(BaseLLM, BaseModel):
    """
    DEPRECATED: Use `langchain_google_genai.GoogleGenerativeAI` instead.

    Google PaLM models.
    """

    client: Any  # set by validate_environment: the genai module or a GenerativeModel
    google_api_key: Optional[SecretStr]
    model_name: str = "models/text-bison-001"
    temperature: float = 0.7  # must be in the range [0.0, 1.0]
    top_p: Optional[float] = None  # must be in the range [0.0, 1.0]
    top_k: Optional[int] = None  # must be positive
    max_output_tokens: Optional[int] = None  # must be greater than zero
    n: int = 1  # number of candidates generated per prompt
    max_retries: int = 6  # retries used by completion_with_retry

    @property
    def is_gemini(self) -> bool:
        """Return whether the model belongs to the Gemini family."""
        return _is_gemini_model(self.model_name)

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"google_api_key": "GOOGLE_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "llms", "google_palm"]

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate the api key and that the python package exists."""
        google_api_key = get_from_dict_or_env(
            values, "google_api_key", "GOOGLE_API_KEY"
        )
        model_name = values["model_name"]
        try:
            import google.generativeai as genai

            if isinstance(google_api_key, SecretStr):
                google_api_key = google_api_key.get_secret_value()

            genai.configure(api_key=google_api_key)

            if _is_gemini_model(model_name):
                values["client"] = genai.GenerativeModel(model_name=model_name)
            else:
                values["client"] = genai
        except ImportError:
            raise ImportError(
                "Could not import google-generativeai python package. "
                "Please install it with `pip install google-generativeai`."
            )

        if values["temperature"] is not None and not 0 <= values["temperature"] <= 1:
            raise ValueError("temperature must be in the range [0.0, 1.0]")

        if values["top_p"] is not None and not 0 <= values["top_p"] <= 1:
            raise ValueError("top_p must be in the range [0.0, 1.0]")

        if values["top_k"] is not None and values["top_k"] <= 0:
            raise ValueError("top_k must be positive")

        if values["max_output_tokens"] is not None and values["max_output_tokens"] <= 0:
            raise ValueError("max_output_tokens must be greater than zero")

        return values

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        generations: List[List[Generation]] = []
        generation_config = {
            "stop_sequences": stop,
            "temperature": self.temperature,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "max_output_tokens": self.max_output_tokens,
            "candidate_count": self.n,
        }
        for prompt in prompts:
            if self.is_gemini:
                res = completion_with_retry(
                    self,
                    prompt=prompt,
                    stream=False,
                    is_gemini=True,
                    run_manager=run_manager,
                    generation_config=generation_config,
                )
                candidates = [
                    "".join([p.text for p in c.content.parts]) for c in res.candidates
                ]
                generations.append([Generation(text=c) for c in candidates])
            else:
                res = completion_with_retry(
                    self,
                    model=self.model_name,
                    prompt=prompt,
                    stream=False,
                    is_gemini=False,
                    run_manager=run_manager,
                    **generation_config,
                )
                prompt_generations = []
                for candidate in res.candidates:
                    raw_text = candidate["output"]
                    stripped_text = _strip_erroneous_leading_spaces(raw_text)
                    prompt_generations.append(Generation(text=stripped_text))
                generations.append(prompt_generations)

        return LLMResult(generations=generations)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        generation_config = kwargs.get("generation_config", {})
        if stop:
            generation_config["stop_sequences"] = stop
        for stream_resp in completion_with_retry(
            self,
            prompt,
            stream=True,
            is_gemini=True,
            run_manager=run_manager,
            generation_config=generation_config,
        ):
            chunk = GenerationChunk(text=stream_resp.text)
            yield chunk
            if run_manager:
                run_manager.on_llm_new_token(
                    chunk.text, chunk=chunk, verbose=self.verbose
                )

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "google_palm"

    def get_num_tokens(self, text: str) -> int:
        """Get the number of tokens present in the text.

        Useful for checking if an input will fit in a model's context window.

        Args:
            text: The string input to tokenize.

        Returns:
            The integer number of tokens in the text.
        """
        if self.is_gemini:
            raise ValueError("Counting tokens is not yet supported!")
        result = self.client.count_text_tokens(model=self.model_name, prompt=text)
        return result["token_count"]
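

# Illustrative usage sketch (not part of the original module): assumes the
# `google-generativeai` package is installed and that GOOGLE_API_KEY is set in
# the environment. The class is deprecated, so new code should prefer
# `langchain_google_genai.GoogleGenerativeAI`, which exposes the same interface.
#
#     from langchain_community.llms.google_palm import GooglePalm
#
#     llm = GooglePalm(model_name="models/text-bison-001", temperature=0.2)
#     print(llm.invoke("Say hello in three words."))
#     print(llm.get_num_tokens("Say hello in three words."))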