from __future__ import annotations

import copy
import json
import logging
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    List,
    Literal,
    Optional,
    TypedDict,
    Union,
    overload,
)

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import PrivateAttr

if TYPE_CHECKING:
    import openllm

ServerType = Literal["http", "grpc"]


class IdentifyingParams(TypedDict):
    """Parameters for identifying a model as a typed dict."""

    model_name: str
    model_id: Optional[str]
    server_url: Optional[str]
    server_type: Optional[ServerType]
    embedded: bool
    llm_kwargs: Dict[str, Any]


logger = logging.getLogger(__name__)


class OpenLLM(LLM):
    """OpenLLM, supporting both in-process model
    instance and remote OpenLLM servers.

    To use, you should have the openllm library installed:

    .. code-block:: bash

        pip install openllm

    Learn more at: https://github.com/bentoml/openllm

    Example running an LLM model locally managed by OpenLLM:
        .. code-block:: python

            from langchain_community.llms import OpenLLM
            llm = OpenLLM(
                model_name='flan-t5',
                model_id='google/flan-t5-large',
            )
            llm.invoke("What is the difference between a duck and a goose?")

    To see all supported models, run 'openllm models'.

    If you have an OpenLLM server running, you can also use it remotely:
        .. code-block:: python

            from langchain_community.llms import OpenLLM
            llm = OpenLLM(server_url='http://localhost:3000')
            llm.invoke("What is the difference between a duck and a goose?")
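
    The async path goes through the standard LangChain runnable interface.
    A minimal sketch, assuming the same server as above is reachable:
        .. code-block:: python

            import asyncio

            from langchain_community.llms import OpenLLM

            # assumes an OpenLLM server is already running at this URL
            llm = OpenLLM(server_url='http://localhost:3000')
            print(asyncio.run(llm.ainvoke("Name a bird that honks.")))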
    """

    model_name: Optional[str] = None
    """Model name to use. See 'openllm models' for all available models."""
    model_id: Optional[str] = None
    """Model id to use. If not provided, will use the default model for the
    model name. See 'openllm models' for all available model variants."""
    server_url: Optional[str] = None
    """Optional server URL that currently runs a LLMServer with 'openllm start'."""
    timeout: int = 30
    """Timeout for the openllm client."""
    server_type: ServerType = "http"
    """Optional server type. Either 'http' or 'grpc'."""
    embedded: bool = True
    """Initialize this LLM instance in current process by default. Should
    only set to False when using in conjunction with BentoML Service."""
    llm_kwargs: Dict[str, Any]
    """Keyword arguments to be passed to openllm.LLM"""

    _runner: Optional[openllm.LLMRunner] = PrivateAttr(default=None)
    _client: Union[
        openllm.client.HTTPClient, openllm.client.GrpcClient, None
    ] = PrivateAttr(default=None)

    class Config:
        extra = "forbid"

    @overload
    def __init__(
        self,
        model_name: Optional[str] = ...,
        *,
        model_id: Optional[str] = ...,
        embedded: Literal[True, False] = ...,
        **llm_kwargs: Any,
    ) -> None:
        ...

    @overload
    def __init__(
        self,
        *,
        server_url: str = ...,
        server_type: Literal["grpc", "http"] = ...,
        **llm_kwargs: Any,
    ) -> None:
        ...

    def __init__(
        self,
        model_name: Optional[str] = None,
        *,
        model_id: Optional[str] = None,
        server_url: Optional[str] = None,
        timeout: int = 30,
        server_type: Literal["grpc", "http"] = "http",
        embedded: bool = True,
        **llm_kwargs: Any,
    ):
        try:
            import openllm
        except ImportError as e:
            raise ImportError(
                "Could not import openllm. "
                "Make sure to install it with 'pip install openllm.'"
            ) from e

        llm_kwargs = llm_kwargs or {}

        if server_url is not None:
            logger.debug("'server_url' is provided, returning an openllm.Client")
            assert (
                model_id is None and model_name is None
            ), "'server_url' and {'model_id', 'model_name'} are mutually exclusive"
            client_cls = (
                openllm.client.HTTPClient
                if server_type == "http"
                else openllm.client.GrpcClient
            )
            client = client_cls(server_url, timeout)

            super().__init__(
                **{
                    "server_url": server_url,
                    "timeout": timeout,
                    "server_type": server_type,
                    "llm_kwargs": llm_kwargs,
                }
            )
            self._runner = None
            self._client = client
        else:
            assert model_name is not None, "Must provide 'model_name' or 'server_url'"
            # Set init_local so LangChain users can run the LLM in-process;
            # BentoML users should pass embedded=False and invoke the runner
            # remotely instead.
            runner = openllm.Runner(
                model_name=model_name,
                model_id=model_id,
                init_local=embedded,
                ensure_available=True,
                **llm_kwargs,
            )
            super().__init__(
                **{
                    "model_name": model_name,
                    "model_id": model_id,
                    "embedded": embedded,
                    "llm_kwargs": llm_kwargs,
                }
            )
            self._client = None
            self._runner = runner

    @property
    def runner(self) -> openllm.LLMRunner:
        """
        Get the underlying openllm.LLMRunner instance for integration with BentoML.

        Example:
        .. code-block:: python

            llm = OpenLLM(
                model_name='flan-t5',
                model_id='google/flan-t5-large',
                embedded=False,
            )
            tools = load_tools(["serpapi", "llm-math"], llm=llm)
            agent = initialize_agent(
                tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION
            )
            svc = bentoml.Service("langchain-openllm", runners=[llm.runner])

            @svc.api(input=Text(), output=Text())
            def chat(input_text: str):
                return agent.run(input_text)
        """
        if self._runner is None:
            raise ValueError("OpenLLM must be initialized locally with 'model_name'")
        return self._runner

    @property
    def _identifying_params(self) -> IdentifyingParams:
        """Get the identifying parameters."""
        if self._client is not None:
            self.llm_kwargs.update(self._client._config)
            model_name = self._client._metadata.model_dump()["model_name"]
            model_id = self._client._metadata.model_dump()["model_id"]
        else:
            if self._runner is None:
                raise ValueError("Runner must be initialized.")
            model_name = self.model_name
            model_id = self.model_id
            try:
                self.llm_kwargs.update(
                    json.loads(self._runner.identifying_params["configuration"])
                )
            except (TypeError, json.JSONDecodeError):
                pass
        return IdentifyingParams(
            server_url=self.server_url,
            server_type=self.server_type,
            embedded=self.embedded,
            llm_kwargs=self.llm_kwargs,
            model_name=model_name,
            model_id=model_id,
        )

    @property
    def _llm_type(self) -> str:
        return "openllm_client" if self._client else "openllm"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        try:
            import openllm
        except ImportError as e:
            raise ImportError(
                "Could not import openllm. "
                "Make sure to install it with 'pip install openllm'."
            ) from e

        copied = copy.deepcopy(self.llm_kwargs)
        copied.update(kwargs)
        config = openllm.AutoConfig.for_model(
            self._identifying_params["model_name"], **copied
        )
        if self._client:
            res = (
                self._client.generate(prompt, **config.model_dump(flatten=True))
                .outputs[0]
                .text
            )
        else:
            assert self._runner is not None
            res = self._runner(prompt, **config.model_dump(flatten=True))
        if isinstance(res, dict) and "text" in res:
            return res["text"]
        elif isinstance(res, str):
            return res
        else:
            raise ValueError(
                "Expected result to be a dict with key 'text' or a string. "
                f"Received {res}"
            )

    async def _acall(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        try:
            import openllm
        except ImportError as e:
            raise ImportError(
                "Could not import openllm. "
                "Make sure to install it with 'pip install openllm'."
            ) from e

        copied = copy.deepcopy(self.llm_kwargs)
        copied.update(kwargs)
        config = openllm.AutoConfig.for_model(
            self._identifying_params["model_name"], **copied
        )
        if self._client:
            async_client = openllm.client.AsyncHTTPClient(
                self.server_url, self.timeout
            )
            res = (
                (
                    await async_client.generate(
                        prompt, **config.model_dump(flatten=True)
                    )
                )
                .outputs[0]
                .text
            )
        else:
            assert self._runner is not None
            # In-process path: sanitize the prompt, run generation on the
            # runner, then post-process into the final text.
            (
                prompt,
                generate_kwargs,
                postprocess_kwargs,
            ) = self._runner.llm.sanitize_parameters(prompt, **copied)
            generated_result = await self._runner.generate.async_run(
                prompt, **generate_kwargs
            )
            res = self._runner.llm.postprocess_generate(
                prompt, generated_result, **postprocess_kwargs
            )

        if isinstance(res, dict) and "text" in res:
            return res["text"]
        elif isinstance(res, str):
            return res
        else:
            raise ValueError(
                "Expected result to be a dict with key 'text' or a string. "
                f"Received {res}"
            )