mirror of
https://github.com/hwchase17/langchain.git
synced 2025-07-17 10:13:29 +00:00
ollama[patch]: permit streaming for tool calls (#28654)
Resolves https://github.com/langchain-ai/langchain/issues/28543
Ollama recently
[released](https://github.com/ollama/ollama/releases/tag/v0.4.6) support
for streaming tool calls. Previously we would override the `stream`
parameter if tools were passed in.
Covered in standard tests here:
c1d348e95d/libs/standard-tests/langchain_tests/integration_tests/chat_models.py (L893-L897)
Before, the test generates one message chunk:
```python
[
AIMessageChunk(
content='',
additional_kwargs={},
response_metadata={
'model': 'llama3.1',
'created_at': '2024-12-10T17:49:04.468487Z',
'done': True,
'done_reason': 'stop',
'total_duration': 525471208,
'load_duration': 19701000,
'prompt_eval_count': 170,
'prompt_eval_duration': 31000000,
'eval_count': 17,
'eval_duration': 473000000,
'message': Message(
role='assistant',
content='',
images=None,
tool_calls=[
ToolCall(
function=Function(name='magic_function', arguments={'input': 3})
)
]
)
},
id='run-552bbe0f-8fb2-4105-ada1-fa38c1db444d',
tool_calls=[
{
'name': 'magic_function',
'args': {'input': 3},
'id': 'b0a4dc07-7d7a-487b-bd7b-ad062c2363a2',
'type': 'tool_call',
},
],
usage_metadata={
'input_tokens': 170, 'output_tokens': 17, 'total_tokens': 187
},
tool_call_chunks=[
{
'name': 'magic_function',
'args': '{"input": 3}',
'id': 'b0a4dc07-7d7a-487b-bd7b-ad062c2363a2',
'index': None,
'type': 'tool_call_chunk',
}
]
)
]
```
After, it generates two (tool call in one, response metadata in
another):
```python
[
AIMessageChunk(
content='',
additional_kwargs={},
response_metadata={},
id='run-9a3f0860-baa1-4bae-9562-13a61702de70',
tool_calls=[
{
'name': 'magic_function',
'args': {'input': 3},
'id': '5bbaee2d-c335-4709-8d67-0783c74bd2e0',
'type': 'tool_call',
},
],
tool_call_chunks=[
{
'name': 'magic_function',
'args': '{"input": 3}',
'id': '5bbaee2d-c335-4709-8d67-0783c74bd2e0',
'index': None,
'type': 'tool_call_chunk',
},
],
),
AIMessageChunk(
content='',
additional_kwargs={},
response_metadata={
'model': 'llama3.1',
'created_at': '2024-12-10T17:46:43.278436Z',
'done': True,
'done_reason': 'stop',
'total_duration': 514282750,
'load_duration': 16894458,
'prompt_eval_count': 170,
'prompt_eval_duration': 31000000,
'eval_count': 17,
'eval_duration': 464000000,
'message': Message(
role='assistant', content='', images=None, tool_calls=None
),
},
id='run-9a3f0860-baa1-4bae-9562-13a61702de70',
usage_metadata={
'input_tokens': 170, 'output_tokens': 17, 'total_tokens': 187
}
),
]
```
This commit is contained in the repository history as commit `bc4dc7f4b1` (parent: `704059466a`).
```diff
@@ -306,8 +306,6 @@ class ChatOllama(BaseChatModel):
             '{"location": "Pune, India", "time_of_day": "morning"}'

     Tool Calling:

-        .. warning::
-            Ollama currently does not support streaming for tools

         .. code-block:: python
```
```diff
@@ -459,12 +457,9 @@ class ChatOllama(BaseChatModel):
             },
         )

-        tools = kwargs.get("tools")
-        default_stream = not bool(tools)
-
         params = {
             "messages": ollama_messages,
-            "stream": kwargs.pop("stream", default_stream),
+            "stream": kwargs.pop("stream", True),
             "model": kwargs.pop("model", self.model),
             "format": kwargs.pop("format", self.format),
             "options": Options(**options_dict),
```
```diff
@@ -472,7 +467,7 @@ class ChatOllama(BaseChatModel):
             **kwargs,
         }

-        if tools:
+        if tools := kwargs.get("tools"):
             params["tools"] = tools

         return params
```
|
Loading…
Reference in New Issue
Block a user