mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-17 23:41:46 +00:00
community[patch]: Added support for Ollama's num_predict option in ChatOllama (#16633)
Just a simple default addition to the options payload for an Ollama generate call, to support a max_new_tokens-style parameter. Should fix issue: https://github.com/langchain-ai/langchain/issues/14715
This commit is contained in:
@@ -88,6 +88,7 @@ def test_handle_kwargs_top_level_parameters(monkeypatch: MonkeyPatch) -> None:
         "num_ctx": None,
         "num_gpu": None,
         "num_thread": None,
+        "num_predict": None,
         "repeat_last_n": None,
         "repeat_penalty": None,
         "stop": [],
@@ -133,6 +134,7 @@ def test_handle_kwargs_with_unknown_param(monkeypatch: MonkeyPatch) -> None:
         "num_ctx": None,
         "num_gpu": None,
         "num_thread": None,
+        "num_predict": None,
         "repeat_last_n": None,
         "repeat_penalty": None,
         "stop": [],
|
Reference in New Issue
Block a user