from __future__ import annotations

import asyncio
import json
from json import JSONDecodeError
from time import sleep
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    List,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
)

from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import CallbackManager
from langchain_core.load import dumpd
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
from langchain_core.runnables import (
    RunnableConfig,
    RunnableSerializable,
    ensure_config,
)
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool

if TYPE_CHECKING:
    import openai
    from openai.types.beta.threads import ThreadMessage
    from openai.types.beta.threads.required_action_function_tool_call import (
        RequiredActionFunctionToolCall,
    )


class OpenAIAssistantFinish(AgentFinish):
    """AgentFinish with run and thread metadata.

    Parameters:
        run_id: Run id.
        thread_id: Thread id.
    """

    run_id: str
    thread_id: str

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Check if the class is serializable by LangChain.

        Returns:
            False
        """
        return False


class OpenAIAssistantAction(AgentAction):
    """AgentAction with info needed to submit custom tool output to existing run.

    Parameters:
        tool_call_id: Tool call id.
        run_id: Run id.
        thread_id: Thread id
    """

    tool_call_id: str
    run_id: str
    thread_id: str

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Check if the class is serializable by LangChain.

        Returns:
            False
        """
        return False


def _get_openai_client() -> openai.OpenAI:
    try:
        import openai

        return openai.OpenAI()
    except ImportError as e:
        raise ImportError(
            "Unable to import openai, please install with `pip install openai`."
        ) from e
    except AttributeError as e:
        raise AttributeError(
            "Please make sure you are using a v1.1-compatible version of openai. You "
            'can install with `pip install "openai>=1.1"`.'
        ) from e


def _get_openai_async_client() -> openai.AsyncOpenAI:
    try:
        import openai

        return openai.AsyncOpenAI()
    except ImportError as e:
        raise ImportError(
            "Unable to import openai, please install with `pip install openai`."
        ) from e
    except AttributeError as e:
        raise AttributeError(
            "Please make sure you are using a v1.1-compatible version of openai. You "
            'can install with `pip install "openai>=1.1"`.'
        ) from e


def _is_assistants_builtin_tool(
    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
) -> bool:
    """Determine if tool corresponds to OpenAI Assistants built-in."""
    assistants_builtin_tools = ("code_interpreter", "retrieval")
    return (
        isinstance(tool, dict)
        and ("type" in tool)
        and (tool["type"] in assistants_builtin_tools)
    )


def _get_assistants_tool(
    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
) -> Dict[str, Any]:
    """Convert a raw function/class to an OpenAI tool.

    Note that OpenAI assistants supports several built-in tools,
    such as "code_interpreter" and "retrieval."
    """
    if _is_assistants_builtin_tool(tool):
        return tool  # type: ignore[return-value]
    else:
        return convert_to_openai_tool(tool)


OutputType = Union[
    List[OpenAIAssistantAction],
    OpenAIAssistantFinish,
    List["ThreadMessage"],
    List["RequiredActionFunctionToolCall"],
]


class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
    """Run an OpenAI Assistant.

    Example using OpenAI tools:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable

            interpreter_assistant = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=[{"type": "code_interpreter"}],
                model="gpt-4-1106-preview"
            )
            output = interpreter_assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})

    Example using custom tools and AgentExecutor:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable
            from langchain.agents import AgentExecutor
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            agent_executor = AgentExecutor(agent=agent, tools=tools)
            agent_executor.invoke({"content": "What's 10 - 4 raised to the 2.7"})


    Example using custom tools and custom execution:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable
            from langchain.agents import AgentExecutor
            from langchain_core.agents import AgentFinish
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            def execute_agent(agent, tools, input):
                tool_map = {tool.name: tool for tool in tools}
                response = agent.invoke(input)
                while not isinstance(response, AgentFinish):
                    tool_outputs = []
                    for action in response:
                        tool_output = tool_map[action.tool].invoke(action.tool_input)
                        tool_outputs.append({"output": tool_output, "tool_call_id": action.tool_call_id})
                    response = agent.invoke(
                        {
                            "tool_outputs": tool_outputs,
                            "run_id": action.run_id,
                            "thread_id": action.thread_id
                        }
                    )

                return response

            response = execute_agent(agent, tools, {"content": "What's 10 - 4 raised to the 2.7"})
            next_response = execute_agent(agent, tools, {"content": "now add 17.241", "thread_id": response.thread_id})

    """

    client: Any = Field(default_factory=_get_openai_client)
    async_client: Any = None
    assistant_id: str
    check_every_ms: float = 1_000.0
    as_agent: bool = False

    @root_validator(pre=False, skip_on_failure=True)
    def validate_async_client(cls, values: dict) -> dict:
        if values["async_client"] is None:
            import openai

            api_key = values["client"].api_key
            values["async_client"] = openai.AsyncOpenAI(api_key=api_key)
        return values

    @classmethod
    def create_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        client: Optional[Union[openai.OpenAI, openai.AzureOpenAI]] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Create an OpenAI Assistant and instantiate the Runnable.

        Args:
            name: Assistant name.
            instructions: Assistant instructions.
            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
            model: Assistant model to use.
            client: OpenAI or AzureOpenAI client.
                Will create a default OpenAI client if not specified.
            kwargs: Additional arguments.

        Returns:
            OpenAIAssistantRunnable configured to run using the created assistant.
        """
        client = client or _get_openai_client()
        assistant = client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=[_get_assistants_tool(tool) for tool in tools],
            model=model,
        )
        return cls(assistant_id=assistant.id, client=client, **kwargs)

    def invoke(
        self, input: dict, config: Optional[RunnableConfig] = None
    ) -> OutputType:
        """Invoke assistant.

        Args:
            input: Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                message_metadata: Metadata to associate with new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when new thread being created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                run_metadata: Metadata to associate with new run.
            config: Runnable config. Defaults to None.

        Return:
            If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                Otherwise, will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
        """
        config = ensure_config(config)
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name")
        )
        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = self._parse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = self.client.beta.threads.runs.submit_tool_outputs(**tool_outputs)
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = self._create_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = self.client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    metadata=input.get("message_metadata"),
                )
                run = self._create_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = self.client.beta.threads.runs.submit_tool_outputs(**input)
            run = self._wait_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    @classmethod
    async def acreate_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        async_client: Optional[
            Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]
        ] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Async create an AsyncOpenAI Assistant and instantiate the Runnable.

        Args:
            name: Assistant name.
            instructions: Assistant instructions.
            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
            model: Assistant model to use.
            async_client: AsyncOpenAI client.
                Will create default async_client if not specified.

        Returns:
            AsyncOpenAIAssistantRunnable configured to run using the created assistant.
        """
        async_client = async_client or _get_openai_async_client()
        openai_tools = [_get_assistants_tool(tool) for tool in tools]
        assistant = await async_client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=openai_tools,
            model=model,
        )
        return cls(assistant_id=assistant.id, async_client=async_client, **kwargs)

    async def ainvoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> OutputType:
        """Async invoke assistant.

        Args:
            input: Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                message_metadata: Metadata to associate with a new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when a new thread is created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                run_metadata: Metadata to associate with new run.
            config: Runnable config. Defaults to None.
            kwargs: Additional arguments.

        Return:
            If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                Otherwise, will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
        """
        config = config or {}
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name")
        )
        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = await self._aparse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **tool_outputs
                )
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = await self._acreate_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = await self.async_client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    metadata=input.get("message_metadata"),
                )
                run = await self._acreate_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **input
                )
            run = await self._await_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    def _parse_intermediate_steps(
        self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
    ) -> dict:
        last_action, last_output = intermediate_steps[-1]
        run = self._wait_for_run(last_action.run_id, last_action.thread_id)
        required_tool_call_ids = set()
        if run.required_action:
            required_tool_call_ids = {
                tc.id for tc in run.required_action.submit_tool_outputs.tool_calls
            }
        tool_outputs = [
            {"output": str(output), "tool_call_id": action.tool_call_id}
            for action, output in intermediate_steps
            if action.tool_call_id in required_tool_call_ids
        ]
        submit_tool_outputs = {
            "tool_outputs": tool_outputs,
            "run_id": last_action.run_id,
            "thread_id": last_action.thread_id,
        }
        return submit_tool_outputs

    def _create_run(self, input: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        return self.client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    def _create_thread_and_run(self, input: dict, thread: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        run = self.client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    def _get_response(self, run: Any) -> Any:
        # TODO: Pagination
        import openai

        major_version = int(openai.version.VERSION.split(".")[0])
        minor_version = int(openai.version.VERSION.split(".")[1])
        version_gte_1_14 = (major_version > 1) or (
            major_version == 1 and minor_version >= 14
        )

        if run.status == "completed":
            messages = self.client.beta.threads.messages.list(
                run.thread_id, order="asc"
            )
            new_messages = [msg for msg in messages if msg.run_id == run.id]
            if not self.as_agent:
                return new_messages
            answer: Any = [
                msg_content for msg in new_messages for msg_content in msg.content
            ]
            if all(
                (
                    isinstance(content, openai.types.beta.threads.TextContentBlock)
                    if version_gte_1_14
                    else isinstance(
                        content, openai.types.beta.threads.MessageContentText
                    )
                )
                for content in answer
            ):
                answer = "\n".join(content.text.value for content in answer)
            return OpenAIAssistantFinish(
                return_values={
                    "output": answer,
                    "thread_id": run.thread_id,
                    "run_id": run.id,
                },
                log="",
                run_id=run.id,
                thread_id=run.thread_id,
            )
        elif run.status == "requires_action":
            if not self.as_agent:
                return run.required_action.submit_tool_outputs.tool_calls
            actions = []
            for tool_call in run.required_action.submit_tool_outputs.tool_calls:
                function = tool_call.function
                try:
                    args = json.loads(function.arguments, strict=False)
                except JSONDecodeError as e:
                    raise ValueError(
                        f"Received invalid JSON function arguments: "
                        f"{function.arguments} for function {function.name}"
                    ) from e
                if len(args) == 1 and "__arg1" in args:
                    args = args["__arg1"]
                actions.append(
                    OpenAIAssistantAction(
                        tool=function.name,
                        tool_input=args,
                        tool_call_id=tool_call.id,
                        log="",
                        run_id=run.id,
                        thread_id=run.thread_id,
                    )
                )
            return actions
        else:
            run_info = json.dumps(run.dict(), indent=2)
            raise ValueError(
                f"Unexpected run status: {run.status}. Full run info:\n\n{run_info}"
            )

    def _wait_for_run(self, run_id: str, thread_id: str) -> Any:
        in_progress = True
        while in_progress:
            run = self.client.beta.threads.runs.retrieve(run_id, thread_id=thread_id)
            in_progress = run.status in ("in_progress", "queued")
            if in_progress:
                sleep(self.check_every_ms / 1000)
        return run

    async def _aparse_intermediate_steps(
        self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
    ) -> dict:
        last_action, last_output = intermediate_steps[-1]
        run = await self._await_for_run(last_action.run_id, last_action.thread_id)
        required_tool_call_ids = set()
        if run.required_action:
            required_tool_call_ids = {
                tc.id for tc in run.required_action.submit_tool_outputs.tool_calls
            }
        tool_outputs = [
            {"output": str(output), "tool_call_id": action.tool_call_id}
            for action, output in intermediate_steps
            if action.tool_call_id in required_tool_call_ids
        ]
        submit_tool_outputs = {
            "tool_outputs": tool_outputs,
            "run_id": last_action.run_id,
            "thread_id": last_action.thread_id,
        }
        return submit_tool_outputs

    async def _acreate_run(self, input: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        return await self.async_client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    async def _acreate_thread_and_run(self, input: dict, thread: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        run = await self.async_client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    async def _aget_response(self, run: Any) -> Any:
        # TODO: Pagination
        import openai

        major_version = int(openai.version.VERSION.split(".")[0])
        minor_version = int(openai.version.VERSION.split(".")[1])
        version_gte_1_14 = (major_version > 1) or (
            major_version == 1 and minor_version >= 14
        )

        if run.status == "completed":
            messages = await self.async_client.beta.threads.messages.list(
                run.thread_id, order="asc"
            )
            new_messages = [msg for msg in messages if msg.run_id == run.id]
            if not self.as_agent:
                return new_messages
            answer: Any = [
                msg_content for msg in new_messages for msg_content in msg.content
            ]
            if all(
                (
                    isinstance(content, openai.types.beta.threads.TextContentBlock)
                    if version_gte_1_14
                    else isinstance(
                        content, openai.types.beta.threads.MessageContentText
                    )
                )
                for content in answer
            ):
                answer = "\n".join(content.text.value for content in answer)
            return OpenAIAssistantFinish(
                return_values={
                    "output": answer,
                    "thread_id": run.thread_id,
                    "run_id": run.id,
                },
                log="",
                run_id=run.id,
                thread_id=run.thread_id,
            )
        elif run.status == "requires_action":
            if not self.as_agent:
                return run.required_action.submit_tool_outputs.tool_calls
            actions = []
            for tool_call in run.required_action.submit_tool_outputs.tool_calls:
                function = tool_call.function
                try:
                    args = json.loads(function.arguments, strict=False)
                except JSONDecodeError as e:
                    raise ValueError(
                        f"Received invalid JSON function arguments: "
                        f"{function.arguments} for function {function.name}"
                    ) from e
                if len(args) == 1 and "__arg1" in args:
                    args = args["__arg1"]
                actions.append(
                    OpenAIAssistantAction(
                        tool=function.name,
                        tool_input=args,
                        tool_call_id=tool_call.id,
                        log="",
                        run_id=run.id,
                        thread_id=run.thread_id,
                    )
                )
            return actions
        else:
            run_info = json.dumps(run.dict(), indent=2)
            raise ValueError(
                f"Unexpected run status: {run.status}. Full run info:\n\n{run_info}"
            )

    async def _await_for_run(self, run_id: str, thread_id: str) -> Any:
        in_progress = True
        while in_progress:
            run = await self.async_client.beta.threads.runs.retrieve(
                run_id, thread_id=thread_id
            )
            in_progress = run.status in ("in_progress", "queued")
            if in_progress:
                await asyncio.sleep(self.check_every_ms / 1000)
        return run
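# Usage sketch (illustrative only; not part of the module's public examples).
# It assumes a valid OPENAI_API_KEY in the environment and an already-created
# assistant; the assistant id below is a hypothetical placeholder.
#
#     from langchain.agents.openai_assistant import OpenAIAssistantRunnable
#
#     assistant = OpenAIAssistantRunnable(assistant_id="asst_...", as_agent=False)
#
#     # First call starts a new thread and returns the run's messages.
#     messages = assistant.invoke({"content": "What's 10 - 4 raised to the 2.7?"})
#
#     # Reusing the thread id of a returned message continues the conversation.
#     follow_up = assistant.invoke(
#         {"content": "Now add 17.241", "thread_id": messages[0].thread_id}
#     )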