core[patch]: exclude model cache from ser (#27086)

Authored by Bagatur on 2024-10-03 15:00:31 -07:00; committed by GitHub.
parent c09da53978
commit 87fc5ce688
3 changed files with 2 additions and 32 deletions

@@ -98,7 +98,7 @@ class BaseLanguageModel(
     All language model wrappers inherited from BaseLanguageModel.
     """
 
-    cache: Union[BaseCache, bool, None] = None
+    cache: Union[BaseCache, bool, None] = Field(default=None, exclude=True)
    """Whether to cache the response.
 
    * If true, will use the global cache.

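The mechanism is plain pydantic: Field(..., exclude=True) keeps the attribute settable and readable on the instance but drops it from model_dump() / model_dump_json() and anything built on top of them. A minimal sketch of that behavior; ToyModel is a hypothetical stand-in, not LangChain code:

# Minimal sketch of pydantic's exclude=True; ToyModel is hypothetical.
from typing import Any, Optional

from pydantic import BaseModel, Field


class ToyModel(BaseModel):
    temperature: float = 0.7
    # Stand-in for BaseLanguageModel.cache after this patch.
    cache: Optional[Any] = Field(default=None, exclude=True)


m = ToyModel(cache={"hits": 0})
print(m.model_dump())       # {'temperature': 0.7} -- no 'cache' key
print(m.model_dump_json())  # '{"temperature":0.7}'
print(m.cache)              # {'hits': 0} -- still available at runtime
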
@@ -303,9 +303,7 @@ def test_llm_representation_for_serializable() -> None:
     chat = CustomChat(cache=cache, messages=iter([]))
     assert chat._get_llm_string() == (
         '{"id": ["tests", "unit_tests", "language_models", "chat_models", '
-        '"test_cache", "CustomChat"], "kwargs": {"cache": {"id": ["tests", '
-        '"unit_tests", "language_models", "chat_models", "test_cache", '
-        '"InMemoryCache"], "lc": 1, "type": "not_implemented"}, "messages": {"id": '
+        '"test_cache", "CustomChat"], "kwargs": {"messages": {"id": '
         '["builtins", "list_iterator"], "lc": 1, "type": "not_implemented"}}, "lc": '
         '1, "name": "CustomChat", "type": "constructor"}---[(\'stop\', None)]'
     )
@@ -324,20 +322,6 @@ def test_cleanup_serialized() -> None:
             "CustomChat",
         ],
         "kwargs": {
-            "cache": {
-                "lc": 1,
-                "type": "not_implemented",
-                "id": [
-                    "tests",
-                    "unit_tests",
-                    "language_models",
-                    "chat_models",
-                    "test_cache",
-                    "InMemoryCache",
-                ],
-                "repr": "<tests.unit_tests.language_models.chat_models."
-                "test_cache.InMemoryCache object at 0x79ff437fe7d0>",
-            },
             "messages": {
                 "lc": 1,
                 "type": "not_implemented",
@@ -380,18 +364,6 @@ def test_cleanup_serialized() -> None:
             "CustomChat",
         ],
         "kwargs": {
-            "cache": {
-                "id": [
-                    "tests",
-                    "unit_tests",
-                    "language_models",
-                    "chat_models",
-                    "test_cache",
-                    "InMemoryCache",
-                ],
-                "lc": 1,
-                "type": "not_implemented",
-            },
             "messages": {
                 "id": ["builtins", "list_iterator"],
                 "lc": 1,

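Taken together, these test updates pin down the observable effect: a model's serialized payload, and with it the string _get_llm_string() derives for cache keys, no longer embeds the cache object itself. A hedged sketch of that effect, assuming langchain-core with this patch and langchain-openai are installed; ChatOpenAI and the dummy api_key are only illustrative stand-ins for any serializable chat model:

# Sketch only: no request is made; the api_key is a placeholder value.
from langchain_core.caches import InMemoryCache
from langchain_core.load import dumps
from langchain_openai import ChatOpenAI

model = ChatOpenAI(model="gpt-4o", api_key="dummy", cache=InMemoryCache())

serialized = dumps(model)
# Before this patch the InMemoryCache instance appeared under "kwargs" as a
# "not_implemented" blob; with the field excluded it is absent entirely.
assert '"cache"' not in serialized

If the sketch above holds, two otherwise identical models configured with different cache objects now produce the same serialized form and the same cache key, keeping runtime-only state out of both.
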
@@ -101,7 +101,6 @@ def test_configurable() -> None:
         "name": None,
         "bound": {
             "name": None,
-            "cache": None,
             "disable_streaming": False,
             "disabled_params": None,
             "model_name": "gpt-4o",
@@ -189,7 +188,6 @@ def test_configurable_with_default() -> None:
         "name": None,
         "bound": {
             "name": None,
-            "cache": None,
             "disable_streaming": False,
             "model": "claude-3-sonnet-20240229",
             "max_tokens": 1024,