From ffd306e85af647f222d0541d1764fec5f0b16ca1 Mon Sep 17 00:00:00 2001 From: Mason Daugherty Date: Sun, 22 Mar 2026 17:26:15 -0400 Subject: [PATCH] cr --- .github/workflows/check_versions.yml | 2 +- .../langchain_core/language_models/base.py | 20 ++++-- .../__snapshots__/test_fallbacks.ambr | 14 ++-- .../__snapshots__/test_runnable.ambr | 48 +++++++------- .../unit_tests/runnables/test_runnable.py | 8 ++- .../langchain_deepseek/chat_models.py | 8 ++- .../deepseek/tests/unit_tests/test_imports.py | 3 + libs/partners/huggingface/Makefile | 8 ++- .../langchain_huggingface/__init__.py | 2 + .../langchain_huggingface/_version.py | 3 + .../chat_models/huggingface.py | 6 ++ .../llms/huggingface_pipeline.py | 7 ++ .../huggingface/scripts/check_version.py | 65 +++++++++++++++++++ .../langchain_openai/chat_models/base.py | 10 ++- libs/partners/perplexity/Makefile | 8 ++- .../langchain_perplexity/__init__.py | 2 + .../langchain_perplexity/_version.py | 3 + .../langchain_perplexity/chat_models.py | 2 + .../perplexity/scripts/check_version.py | 65 +++++++++++++++++++ .../partners/xai/langchain_xai/chat_models.py | 8 ++- 20 files changed, 244 insertions(+), 48 deletions(-) create mode 100644 libs/partners/huggingface/langchain_huggingface/_version.py create mode 100644 libs/partners/huggingface/scripts/check_version.py create mode 100644 libs/partners/perplexity/langchain_perplexity/_version.py create mode 100644 libs/partners/perplexity/scripts/check_version.py diff --git a/.github/workflows/check_versions.yml b/.github/workflows/check_versions.yml index 5ffa2fe1116..2ac1a6ac007 100644 --- a/.github/workflows/check_versions.yml +++ b/.github/workflows/check_versions.yml @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/checkout@v6 - - uses: actions/setup-python@v5 + - uses: astral-sh/setup-uv@v6 with: python-version: "3.12" diff --git a/libs/core/langchain_core/language_models/base.py b/libs/core/langchain_core/language_models/base.py index 84a9f7de3e7..d79771d1231 100644 --- 
a/libs/core/langchain_core/language_models/base.py +++ b/libs/core/langchain_core/language_models/base.py @@ -182,13 +182,21 @@ class BaseLanguageModel( def model_post_init(self, _context: Any, /) -> None: """Pydantic V2 lifecycle hook called automatically after `__init__`. - Seeds `metadata.versions` with the installed `langchain-core` version so - that every LLM trace carries the package version that produced it. + Seeds `metadata["versions"]` with the installed `langchain-core` + version so that every LLM trace carries the package version that + produced it. Partner packages should **not** override this method. Instead, they - should define a `@model_validator(mode="after")` method named - `_set_version` that calls `_add_version` to append their version to the - same dict. + should define a `@model_validator(mode="after")` that calls + `_add_version` to append their own version to the same dict. + + !!! warning "Validator naming" + + Each subclass's validator **must** have a unique name. Pydantic + replaces — rather than chains — same-named `model_validator` methods + in child classes. For example, a `BaseChatOpenAI` subclass should + use `_set_deepseek_version`, not `_set_version`, to avoid silently + dropping the parent's entry. Args: _context: Pydantic validation context (typically `None`). @@ -198,7 +206,7 @@ class BaseLanguageModel( self._add_version("langchain-core", VERSION) def _add_version(self, pkg: str, version: str) -> None: - """Record a package version in ``metadata.versions`` for tracing. + """Record a package version in `metadata.versions` for tracing. 
Each layer in the class hierarchy (core -> langchain -> partner) calls this so that the resulting metadata dict accumulates *all* diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_fallbacks.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_fallbacks.ambr index a5e3dd82318..8f2eae4265e 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_fallbacks.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_fallbacks.ambr @@ -84,7 +84,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['foo'], i=1)", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo'], i=1)", "name": "FakeListLLM" } }, @@ -128,7 +128,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['bar'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['bar'])", "name": "FakeListLLM" } }, @@ -268,7 +268,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['foo'], i=1)", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo'], i=1)", "name": "FakeListLLM" }, "fallbacks": [ @@ -281,7 +281,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['bar'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['bar'])", "name": "FakeListLLM" } ], @@ -322,7 +322,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['foo'], i=1)", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo'], i=1)", "name": "FakeListLLM" }, "fallbacks": [ @@ -335,7 +335,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['baz'], i=1)", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['baz'], i=1)", "name": "FakeListLLM" }, { @@ -347,7 +347,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['bar'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, 
responses=['bar'])", "name": "FakeListLLM" } ], diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr index 0f04a93366a..3e6622e255c 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr @@ -97,7 +97,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['foo, bar'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo, bar'])", "name": "FakeListChatModel" } ], @@ -227,7 +227,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['baz, qux'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['baz, qux'])", "name": "FakeListChatModel" } ], @@ -346,7 +346,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['foo, bar'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo, bar'])", "name": "FakeListChatModel" }, { @@ -457,7 +457,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['baz, qux'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['baz, qux'])", "name": "FakeListChatModel" } ], @@ -848,7 +848,7 @@ "fake", "FakeStreamingListLLM" ], - "repr": "FakeStreamingListLLM(responses=['first item, second item, third item'])", + "repr": "FakeStreamingListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['first item, second item, third item'])", "name": "FakeStreamingListLLM" }, { @@ -884,7 +884,7 @@ "fake", "FakeStreamingListLLM" ], - "repr": "FakeStreamingListLLM(responses=['this', 'is', 'a', 'test'])", + "repr": "FakeStreamingListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['this', 'is', 'a', 'test'])", 
"name": "FakeStreamingListLLM" } }, @@ -1009,7 +1009,7 @@ # name: test_prompt_with_chat_model ''' ChatPromptTemplate(input_variables=['question'], input_types={}, partial_variables={}, messages=[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], input_types={}, partial_variables={}, template='You are a nice assistant.'), additional_kwargs={}), HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['question'], input_types={}, partial_variables={}, template='{question}'), additional_kwargs={})]) - | FakeListChatModel(responses=['foo']) + | FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo']) ''' # --- # name: test_prompt_with_chat_model.1 @@ -1109,7 +1109,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['foo'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo'])", "name": "FakeListChatModel" } }, @@ -1220,7 +1220,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['foo, bar'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo, bar'])", "name": "FakeListChatModel" } ], @@ -1249,7 +1249,7 @@ # name: test_prompt_with_chat_model_async ''' ChatPromptTemplate(input_variables=['question'], input_types={}, partial_variables={}, messages=[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], input_types={}, partial_variables={}, template='You are a nice assistant.'), additional_kwargs={}), HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['question'], input_types={}, partial_variables={}, template='{question}'), additional_kwargs={})]) - | FakeListChatModel(responses=['foo']) + | FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo']) ''' # --- # name: test_prompt_with_chat_model_async.1 @@ -1349,7 +1349,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": 
"FakeListChatModel(responses=['foo'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo'])", "name": "FakeListChatModel" } }, @@ -1459,7 +1459,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['foo', 'bar'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo', 'bar'])", "name": "FakeListLLM" } }, @@ -1576,7 +1576,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['foo', 'bar'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo', 'bar'])", "name": "FakeListLLM" } ], @@ -1699,7 +1699,7 @@ "fake", "FakeStreamingListLLM" ], - "repr": "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'])", + "repr": "FakeStreamingListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['bear, dog, cat', 'tomato, lettuce, onion'])", "name": "FakeStreamingListLLM" } ], @@ -1867,7 +1867,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['4'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['4'])", "name": "FakeListLLM" } }, @@ -1940,7 +1940,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=['2'])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['2'])", "name": "FakeListLLM" } }, @@ -13407,7 +13407,7 @@ just_to_test_lambda: RunnableLambda(...) 
} | ChatPromptTemplate(input_variables=['documents', 'question'], input_types={}, partial_variables={}, messages=[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], input_types={}, partial_variables={}, template='You are a nice assistant.'), additional_kwargs={}), HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['documents', 'question'], input_types={}, partial_variables={}, template='Context:\n{documents}\n\nQuestion:\n{question}'), additional_kwargs={})]) - | FakeListChatModel(responses=['foo, bar']) + | FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo, bar']) | CommaSeparatedListOutputParser() ''' # --- @@ -13610,7 +13610,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=['foo, bar'])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=['foo, bar'])", "name": "FakeListChatModel" } ], @@ -13636,8 +13636,8 @@ ChatPromptTemplate(input_variables=['question'], input_types={}, partial_variables={}, messages=[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], input_types={}, partial_variables={}, template='You are a nice assistant.'), additional_kwargs={}), HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['question'], input_types={}, partial_variables={}, template='{question}'), additional_kwargs={})]) | RunnableLambda(...) 
| { - chat: FakeListChatModel(responses=["i'm a chatbot"]), - llm: FakeListLLM(responses=["i'm a textbot"]) + chat: FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=["i'm a chatbot"]), + llm: FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=["i'm a textbot"]) } ''' # --- @@ -13762,7 +13762,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=[\"i'm a chatbot\"])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=[\"i'm a chatbot\"])", "name": "FakeListChatModel" }, "llm": { @@ -13774,7 +13774,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=[\"i'm a textbot\"])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=[\"i'm a textbot\"])", "name": "FakeListLLM" } } @@ -13917,7 +13917,7 @@ "fake_chat_models", "FakeListChatModel" ], - "repr": "FakeListChatModel(responses=[\"i'm a chatbot\"])", + "repr": "FakeListChatModel(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=[\"i'm a chatbot\"])", "name": "FakeListChatModel" }, "kwargs": { @@ -13938,7 +13938,7 @@ "fake", "FakeListLLM" ], - "repr": "FakeListLLM(responses=[\"i'm a textbot\"])", + "repr": "FakeListLLM(metadata={'versions': {'langchain-core': '1.2.20'}}, responses=[\"i'm a textbot\"])", "name": "FakeListLLM" }, "passthrough": { diff --git a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py index b6848c2cd85..fefcd6e8a47 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable.py @@ -91,6 +91,7 @@ from langchain_core.tracers import ( from langchain_core.tracers._compat import pydantic_copy from langchain_core.tracers.context import collect_runs from langchain_core.utils.pydantic import PYDANTIC_VERSION +from langchain_core.version import VERSION from tests.unit_tests.pydantic_utils import 
_normalize_schema, _schema from tests.unit_tests.stubs import AnyStr, _any_id_ai_message, _any_id_ai_message_chunk @@ -2149,7 +2150,11 @@ async def test_prompt_with_llm( "value": { "end_time": None, "final_output": None, - "metadata": {"ls_model_type": "llm", "ls_provider": "fakelist"}, + "metadata": { + "ls_model_type": "llm", + "ls_provider": "fakelist", + "versions": {"langchain-core": VERSION}, + }, "name": "FakeListLLM", "start_time": "2023-01-01T00:00:00.000+00:00", "streamed_output": [], @@ -2363,6 +2368,7 @@ async def test_prompt_with_llm_parser( "metadata": { "ls_model_type": "llm", "ls_provider": "fakestreaminglist", + "versions": {"langchain-core": VERSION}, }, "name": "FakeStreamingListLLM", "start_time": "2023-01-01T00:00:00.000+00:00", diff --git a/libs/partners/deepseek/langchain_deepseek/chat_models.py b/libs/partners/deepseek/langchain_deepseek/chat_models.py index aec768239bf..215567bff2d 100644 --- a/libs/partners/deepseek/langchain_deepseek/chat_models.py +++ b/libs/partners/deepseek/langchain_deepseek/chat_models.py @@ -226,8 +226,12 @@ class ChatDeepSeek(BaseChatOpenAI): return ls_params @model_validator(mode="after") - def _set_version(self) -> Self: - """Set package version in metadata.""" + def _set_deepseek_version(self) -> Self: + """Set package version in metadata. + + Named uniquely to avoid shadowing `BaseChatOpenAI._set_version`; + Pydantic replaces same-named validators rather than chaining them. 
+ """ self._add_version("langchain-deepseek", __version__) return self diff --git a/libs/partners/deepseek/tests/unit_tests/test_imports.py b/libs/partners/deepseek/tests/unit_tests/test_imports.py index 7e5902acab1..a7c9b2f3fda 100644 --- a/libs/partners/deepseek/tests/unit_tests/test_imports.py +++ b/libs/partners/deepseek/tests/unit_tests/test_imports.py @@ -1,7 +1,10 @@ +"""Test `langchain_deepseek` public API surface.""" + from langchain_deepseek import __all__ EXPECTED_ALL = ["__version__", "ChatDeepSeek"] def test_all_imports() -> None: + """Verify that `__all__` exports match the expected public API.""" assert sorted(EXPECTED_ALL) == sorted(__all__) diff --git a/libs/partners/huggingface/Makefile b/libs/partners/huggingface/Makefile index fcc48416c38..f9eb4b936a1 100644 --- a/libs/partners/huggingface/Makefile +++ b/libs/partners/huggingface/Makefile @@ -1,4 +1,4 @@ -.PHONY: all format lint type test tests integration_tests help extended_tests +.PHONY: all format lint type test tests integration_tests help extended_tests check_version # Default target executed when no arguments are given to make. 
all: help @@ -55,13 +55,17 @@ format format_diff: check_imports: $(shell find langchain_huggingface -name '*.py') $(UV_RUN_LINT) python ./scripts/check_imports.py $^ +check_version: + uv run python ./scripts/check_version.py + ###################### # HELP ###################### help: @echo '----' - @echo 'check_imports - check imports' + @echo 'check_imports - check imports' + @echo 'check_version - validate version consistency' @echo 'format - run code formatters' @echo 'lint - run linters' @echo 'type - run type checking' diff --git a/libs/partners/huggingface/langchain_huggingface/__init__.py b/libs/partners/huggingface/langchain_huggingface/__init__.py index a64efaa4812..df7dbecaff9 100644 --- a/libs/partners/huggingface/langchain_huggingface/__init__.py +++ b/libs/partners/huggingface/langchain_huggingface/__init__.py @@ -1,5 +1,6 @@ """Hugging Face integration for LangChain.""" +from langchain_huggingface._version import __version__ from langchain_huggingface.chat_models import ( ChatHuggingFace, # type: ignore[import-not-found] ) @@ -18,4 +19,5 @@ __all__ = [ "HuggingFaceEndpoint", "HuggingFaceEndpointEmbeddings", "HuggingFacePipeline", + "__version__", ] diff --git a/libs/partners/huggingface/langchain_huggingface/_version.py b/libs/partners/huggingface/langchain_huggingface/_version.py new file mode 100644 index 00000000000..0d13a9af580 --- /dev/null +++ b/libs/partners/huggingface/langchain_huggingface/_version.py @@ -0,0 +1,3 @@ +"""Version information for `langchain-huggingface`.""" + +__version__ = "1.2.1" diff --git a/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py b/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py index 31cfb85f1cd..c651f048cfc 100644 --- a/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py +++ b/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py @@ -69,6 +69,7 @@ from langchain_core.utils.pydantic import is_basemodel_subclass from 
pydantic import BaseModel, Field, model_validator from typing_extensions import Self +from langchain_huggingface._version import __version__ from langchain_huggingface.data._profiles import _PROFILES from langchain_huggingface.llms.huggingface_endpoint import HuggingFaceEndpoint from langchain_huggingface.llms.huggingface_pipeline import HuggingFacePipeline @@ -580,6 +581,11 @@ class ChatHuggingFace(BaseChatModel): ): self.model_kwargs = self.llm.model_kwargs.copy() + @model_validator(mode="after") + def _add_pkg_version(self) -> Self: + self._add_version("langchain-huggingface", __version__) + return self + @model_validator(mode="after") def validate_llm(self) -> Self: if ( diff --git a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py index ba646f1309f..ce172f6f611 100644 --- a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py +++ b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py @@ -9,7 +9,9 @@ from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.language_models.llms import BaseLLM from langchain_core.outputs import Generation, GenerationChunk, LLMResult from pydantic import ConfigDict, model_validator +from typing_extensions import Self +from langchain_huggingface._version import __version__ from langchain_huggingface.utils.import_utils import ( IMPORT_ERROR, is_ipex_available, @@ -91,6 +93,11 @@ class HuggingFacePipeline(BaseLLM): extra="forbid", ) + @model_validator(mode="after") + def _add_pkg_version(self) -> Self: + self._add_version("langchain-huggingface", __version__) + return self + @model_validator(mode="before") @classmethod def pre_init_validator(cls, values: dict[str, Any]) -> dict[str, Any]: diff --git a/libs/partners/huggingface/scripts/check_version.py b/libs/partners/huggingface/scripts/check_version.py new file mode 100644 index 00000000000..5dac92fe33a 
--- /dev/null +++ b/libs/partners/huggingface/scripts/check_version.py @@ -0,0 +1,65 @@ +"""Check version consistency between `pyproject.toml` and `_version.py`. + +This script validates that the version defined in pyproject.toml matches the +`__version__` variable in `langchain_huggingface/_version.py`. Intended for use as a +pre-commit hook to prevent version mismatches. +""" + +import re +import sys +from pathlib import Path + + +def get_pyproject_version(pyproject_path: Path) -> str | None: + """Extract version from `pyproject.toml`.""" + content = pyproject_path.read_text(encoding="utf-8") + match = re.search(r'^version\s*=\s*"([^"]+)"', content, re.MULTILINE) + return match.group(1) if match else None + + +def get_version_py_version(version_path: Path) -> str | None: + """Extract `__version__` from `_version.py`.""" + content = version_path.read_text(encoding="utf-8") + match = re.search(r'^__version__\s*=\s*"([^"]+)"', content, re.MULTILINE) + return match.group(1) if match else None + + +def main() -> int: + """Validate version consistency.""" + script_dir = Path(__file__).parent + package_dir = script_dir.parent + + pyproject_path = package_dir / "pyproject.toml" + version_path = package_dir / "langchain_huggingface" / "_version.py" + + if not pyproject_path.exists(): + print(f"Error: {pyproject_path} not found") # noqa: T201 + return 1 + + if not version_path.exists(): + print(f"Error: {version_path} not found") # noqa: T201 + return 1 + + pyproject_version = get_pyproject_version(pyproject_path) + version_py_version = get_version_py_version(version_path) + + if pyproject_version is None: + print("Error: Could not find version in pyproject.toml") # noqa: T201 + return 1 + + if version_py_version is None: + print("Error: Could not find __version__ in langchain_huggingface/_version.py") # noqa: T201 + return 1 + + if pyproject_version != version_py_version: + print("Error: Version mismatch detected!") # noqa: T201 + print(f" pyproject.toml: 
{pyproject_version}") # noqa: T201 + print(f" langchain_huggingface/_version.py: {version_py_version}") # noqa: T201 + return 1 + + print(f"Version check passed: {pyproject_version}") # noqa: T201 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index 3a90c6c51c0..54a13237976 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -972,7 +972,15 @@ class BaseChatOpenAI(BaseChatModel): @model_validator(mode="after") def _set_version(self) -> Self: - """Set package version in metadata.""" + """Set package version in metadata. + + Note: Subclasses that inherit from `BaseChatOpenAI` (e.g. + `ChatDeepSeek`, `ChatXAI`) must use a **unique** validator name + (e.g. `_set_deepseek_version`) instead of overriding this one. Pydantic + replaces same-named `model_validator` methods rather than chaining them, + so reusing `_set_version` would silently drop the parent's + `langchain-openai` version entry. + """ self._add_version("langchain-openai", __version__) return self diff --git a/libs/partners/perplexity/Makefile b/libs/partners/perplexity/Makefile index bc3581a23b0..930cc0880ac 100644 --- a/libs/partners/perplexity/Makefile +++ b/libs/partners/perplexity/Makefile @@ -1,4 +1,4 @@ -.PHONY: all format lint type test tests integration_tests help extended_tests +.PHONY: all format lint type test tests integration_tests help extended_tests check_version # Default target executed when no arguments are given to make. 
all: help @@ -53,13 +53,17 @@ format format_diff: check_imports: $(shell find langchain_perplexity -name '*.py') $(UV_RUN_LINT) python ./scripts/check_imports.py $^ +check_version: + uv run python ./scripts/check_version.py + ###################### # HELP ###################### help: @echo '----' - @echo 'check_imports - check imports' + @echo 'check_imports - check imports' + @echo 'check_version - validate version consistency' @echo 'format - run code formatters' @echo 'lint - run linters' @echo 'type - run type checking' diff --git a/libs/partners/perplexity/langchain_perplexity/__init__.py b/libs/partners/perplexity/langchain_perplexity/__init__.py index 5db46f6bc40..d4b4092efb7 100644 --- a/libs/partners/perplexity/langchain_perplexity/__init__.py +++ b/libs/partners/perplexity/langchain_perplexity/__init__.py @@ -1,5 +1,6 @@ """Perplexity AI integration for LangChain.""" +from langchain_perplexity._version import __version__ from langchain_perplexity.chat_models import ChatPerplexity from langchain_perplexity.output_parsers import ( ReasoningJsonOutputParser, @@ -16,6 +17,7 @@ from langchain_perplexity.types import ( ) __all__ = [ + "__version__", "ChatPerplexity", "PerplexitySearchRetriever", "PerplexitySearchResults", diff --git a/libs/partners/perplexity/langchain_perplexity/_version.py b/libs/partners/perplexity/langchain_perplexity/_version.py new file mode 100644 index 00000000000..9921032022e --- /dev/null +++ b/libs/partners/perplexity/langchain_perplexity/_version.py @@ -0,0 +1,3 @@ +"""Version information for `langchain-perplexity`.""" + +__version__ = "1.1.0" diff --git a/libs/partners/perplexity/langchain_perplexity/chat_models.py b/libs/partners/perplexity/langchain_perplexity/chat_models.py index 300250d54e2..ee6a37ef3cb 100644 --- a/libs/partners/perplexity/langchain_perplexity/chat_models.py +++ b/libs/partners/perplexity/langchain_perplexity/chat_models.py @@ -49,6 +49,7 @@ from perplexity import AsyncPerplexity, Perplexity from pydantic 
import BaseModel, ConfigDict, Field, SecretStr, model_validator from typing_extensions import Self +from langchain_perplexity._version import __version__ from langchain_perplexity.data._profiles import _PROFILES from langchain_perplexity.output_parsers import ( ReasoningJsonOutputParser, @@ -293,6 +294,7 @@ class ChatPerplexity(BaseChatModel): @model_validator(mode="after") def validate_environment(self) -> Self: """Validate that api key and python package exists in environment.""" + self._add_version("langchain-perplexity", __version__) pplx_api_key = ( self.pplx_api_key.get_secret_value() if self.pplx_api_key else None ) diff --git a/libs/partners/perplexity/scripts/check_version.py b/libs/partners/perplexity/scripts/check_version.py new file mode 100644 index 00000000000..4c54aa698ce --- /dev/null +++ b/libs/partners/perplexity/scripts/check_version.py @@ -0,0 +1,65 @@ +"""Check version consistency between `pyproject.toml` and `_version.py`. + +This script validates that the version defined in pyproject.toml matches the +`__version__` variable in `langchain_perplexity/_version.py`. Intended for use as a +pre-commit hook to prevent version mismatches. 
+""" + +import re +import sys +from pathlib import Path + + +def get_pyproject_version(pyproject_path: Path) -> str | None: + """Extract version from `pyproject.toml`.""" + content = pyproject_path.read_text(encoding="utf-8") + match = re.search(r'^version\s*=\s*"([^"]+)"', content, re.MULTILINE) + return match.group(1) if match else None + + +def get_version_py_version(version_path: Path) -> str | None: + """Extract `__version__` from `_version.py`.""" + content = version_path.read_text(encoding="utf-8") + match = re.search(r'^__version__\s*=\s*"([^"]+)"', content, re.MULTILINE) + return match.group(1) if match else None + + +def main() -> int: + """Validate version consistency.""" + script_dir = Path(__file__).parent + package_dir = script_dir.parent + + pyproject_path = package_dir / "pyproject.toml" + version_path = package_dir / "langchain_perplexity" / "_version.py" + + if not pyproject_path.exists(): + print(f"Error: {pyproject_path} not found") # noqa: T201 + return 1 + + if not version_path.exists(): + print(f"Error: {version_path} not found") # noqa: T201 + return 1 + + pyproject_version = get_pyproject_version(pyproject_path) + version_py_version = get_version_py_version(version_path) + + if pyproject_version is None: + print("Error: Could not find version in pyproject.toml") # noqa: T201 + return 1 + + if version_py_version is None: + print("Error: Could not find __version__ in langchain_perplexity/_version.py") # noqa: T201 + return 1 + + if pyproject_version != version_py_version: + print("Error: Version mismatch detected!") # noqa: T201 + print(f" pyproject.toml: {pyproject_version}") # noqa: T201 + print(f" langchain_perplexity/_version.py: {version_py_version}") # noqa: T201 + return 1 + + print(f"Version check passed: {pyproject_version}") # noqa: T201 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/libs/partners/xai/langchain_xai/chat_models.py b/libs/partners/xai/langchain_xai/chat_models.py index 
89904e56916..debe4cb9394 100644 --- a/libs/partners/xai/langchain_xai/chat_models.py +++ b/libs/partners/xai/langchain_xai/chat_models.py @@ -470,8 +470,12 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] return params @model_validator(mode="after") - def _set_version(self) -> Self: - """Set package version in metadata.""" + def _set_xai_version(self) -> Self: + """Set package version in metadata. + + Named uniquely to avoid shadowing `BaseChatOpenAI._set_version`; + Pydantic replaces same-named validators rather than chaining them. + """ self._add_version("langchain-xai", __version__) return self