mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-19 00:58:32 +00:00
openai[patch]: add attribute to always use previous_response_id (#31734)
This commit is contained in:
@@ -607,6 +607,40 @@ class BaseChatOpenAI(BaseChatModel):
|
||||
.. versionadded:: 0.3.24
|
||||
"""
|
||||
|
||||
use_previous_response_id: bool = False
|
||||
"""If True, always pass ``previous_response_id`` using the ID of the most recent
|
||||
response. Responses API only.
|
||||
|
||||
Input messages up to the most recent response will be dropped from request
|
||||
payloads.
|
||||
|
||||
For example, the following two are equivalent:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
llm = ChatOpenAI(
|
||||
model="o4-mini",
|
||||
use_previous_response_id=True,
|
||||
)
|
||||
llm.invoke(
|
||||
[
|
||||
HumanMessage("Hello"),
|
||||
AIMessage("Hi there!", response_metadata={"id": "resp_123"}),
|
||||
HumanMessage("How are you?"),
|
||||
]
|
||||
)
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
llm = ChatOpenAI(
|
||||
model="o4-mini",
|
||||
use_responses_api=True,
|
||||
)
|
||||
llm.invoke([HumanMessage("How are you?")], previous_response_id="resp_123")
|
||||
|
||||
.. versionadded:: 0.3.26
|
||||
"""
|
||||
|
||||
use_responses_api: Optional[bool] = None
|
||||
"""Whether to use the Responses API instead of the Chat API.
|
||||
|
||||
@@ -1081,6 +1115,8 @@ class BaseChatOpenAI(BaseChatModel):
|
||||
return True
|
||||
elif self.truncation is not None:
|
||||
return True
|
||||
elif self.use_previous_response_id:
|
||||
return True
|
||||
else:
|
||||
return _use_responses_api(payload)
|
||||
|
||||
@@ -1097,7 +1133,14 @@ class BaseChatOpenAI(BaseChatModel):
|
||||
|
||||
payload = {**self._default_params, **kwargs}
|
||||
if self._use_responses_api(payload):
|
||||
payload = _construct_responses_api_payload(messages, payload)
|
||||
if self.use_previous_response_id:
|
||||
last_messages, previous_response_id = _get_last_messages(messages)
|
||||
payload_to_use = last_messages if previous_response_id else messages
|
||||
if previous_response_id:
|
||||
payload["previous_response_id"] = previous_response_id
|
||||
payload = _construct_responses_api_payload(payload_to_use, payload)
|
||||
else:
|
||||
payload = _construct_responses_api_payload(messages, payload)
|
||||
else:
|
||||
payload["messages"] = [_convert_message_to_dict(m) for m in messages]
|
||||
return payload
|
||||
@@ -3202,6 +3245,30 @@ def _use_responses_api(payload: dict) -> bool:
|
||||
return bool(uses_builtin_tools or responses_only_args.intersection(payload))
|
||||
|
||||
|
||||
def _get_last_messages(
    messages: Sequence[BaseMessage],
) -> tuple[Sequence[BaseMessage], Optional[str]]:
    """Split a conversation at the most recent AIMessage that carries a response ID.

    Scans *messages* from the end looking for an ``AIMessage`` whose
    ``response_metadata`` contains a non-empty ``"id"``.

    Returns:
        A two-tuple of:

        1. The messages that come *after* that AIMessage (possibly an empty
           sequence), and
        2. the response ID itself.

        If the most recent AIMessage has no ID — or the sequence contains no
        AIMessage at all — the entire input sequence is returned together
        with ``None``.
    """
    # Walk backwards; only the *most recent* AIMessage is considered.
    for offset, candidate in enumerate(reversed(messages)):
        if not isinstance(candidate, AIMessage):
            continue
        previous_id = candidate.response_metadata.get("id")
        if not previous_id:
            # Most recent AIMessage lacks an ID -> caller must send everything.
            break
        # reversed-offset -> forward index: candidate sits at len - 1 - offset,
        # so the tail after it starts at len - offset.
        tail_start = len(messages) - offset
        return messages[tail_start:], previous_id
    return messages, None
|
||||
|
||||
|
||||
def _construct_responses_api_payload(
|
||||
messages: Sequence[BaseMessage], payload: dict
|
||||
) -> dict:
|
||||
|
Reference in New Issue
Block a user