diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 91f1faf3055..132deb5a148 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -134,7 +134,7 @@ Run these locally before submitting a PR; the CI system will check also.
 
 #### Code Formatting
 
-Formatting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/) and [ruff](https://docs.astral.sh/ruff/rules/).
+Formatting for this project is done via [ruff](https://docs.astral.sh/ruff/rules/).
 
 To run formatting for docs, cookbook and templates:
 
@@ -159,7 +159,7 @@ This is especially useful when you have made changes to a subset of the project
 
 #### Linting
 
-Linting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/), [ruff](https://docs.astral.sh/ruff/rules/), and [mypy](http://mypy-lang.org/).
+Linting for this project is done via a combination of [ruff](https://docs.astral.sh/ruff/rules/) and [mypy](http://mypy-lang.org/).
 
 To run linting for docs, cookbook and templates:
 
@@ -302,8 +302,8 @@ make api_docs_linkcheck
 
 ### Verify Documentation changes
 
-After pushing documentation changes to the repository, you can preview and verify that the changes are
-what you wanted by clicking the `View deployment` or `Visit Preview` buttons on the pull request `Conversation` page.
+After pushing documentation changes to the repository, you can preview and verify that the changes are
+what you wanted by clicking the `View deployment` or `Visit Preview` buttons on the pull request `Conversation` page.
 This will take you to a preview of the documentation changes.
 This preview is created by [Vercel](https://vercel.com/docs/getting-started-with-vercel).
diff --git a/Makefile b/Makefile
index 961819bea74..6fa779b07fa 100644
--- a/Makefile
+++ b/Makefile
@@ -43,10 +43,10 @@ spell_fix:
 
 lint:
 	poetry run ruff docs templates cookbook
-	poetry run black docs templates cookbook --check
+	poetry run ruff format docs templates cookbook --diff
 
 format format_diff:
-	poetry run black docs templates cookbook
+	poetry run ruff format docs templates cookbook
 	poetry run ruff --select I --fix docs templates cookbook
 
 ######################
diff --git a/libs/experimental/Makefile b/libs/experimental/Makefile
index e3a12d17211..0325c7f6c63 100644
--- a/libs/experimental/Makefile
+++ b/libs/experimental/Makefile
@@ -9,7 +9,7 @@ TEST_FILE ?= tests/unit_tests/
 test:
 	poetry run pytest $(TEST_FILE)
 
-tests: 
+tests:
 	poetry run pytest $(TEST_FILE)
 
 test_watch:
@@ -33,11 +33,11 @@ lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/experimenta
 
 lint lint_diff:
 	poetry run mypy $(PYTHON_FILES)
-	poetry run black $(PYTHON_FILES) --check
+	poetry run ruff format $(PYTHON_FILES) --diff
 	poetry run ruff .
 
 format format_diff:
-	poetry run black $(PYTHON_FILES)
+	poetry run ruff format $(PYTHON_FILES)
 	poetry run ruff --select I --fix $(PYTHON_FILES)
 
 spell_check:
diff --git a/libs/experimental/langchain_experimental/tabular_synthetic_data/openai.py b/libs/experimental/langchain_experimental/tabular_synthetic_data/openai.py
index 41d3575f90d..6ceaee0ede9 100644
--- a/libs/experimental/langchain_experimental/tabular_synthetic_data/openai.py
+++ b/libs/experimental/langchain_experimental/tabular_synthetic_data/openai.py
@@ -16,7 +16,7 @@ def create_openai_data_generator(
     llm: ChatOpenAI,
     prompt: BasePromptTemplate,
     output_parser: Optional[BaseLLMOutputParser] = None,
-    **kwargs: Any
+    **kwargs: Any,
 ) -> SyntheticDataGenerator:
     """
     Create an instance of SyntheticDataGenerator tailored for OpenAI models.
diff --git a/libs/experimental/langchain_experimental/tot/thought_generation.py b/libs/experimental/langchain_experimental/tot/thought_generation.py
index 8a7a665663b..655a5f7ebd8 100644
--- a/libs/experimental/langchain_experimental/tot/thought_generation.py
+++ b/libs/experimental/langchain_experimental/tot/thought_generation.py
@@ -29,7 +29,7 @@ class BaseThoughtGenerationStrategy(LLMChain):
         self,
         problem_description: str,
         thoughts_path: Tuple[str, ...] = (),
-        **kwargs: Any
+        **kwargs: Any,
     ) -> str:
         """
         Generate the next thought given the problem description and the thoughts
@@ -52,7 +52,7 @@ class SampleCoTStrategy(BaseThoughtGenerationStrategy):
         self,
         problem_description: str,
         thoughts_path: Tuple[str, ...] = (),
-        **kwargs: Any
+        **kwargs: Any,
     ) -> str:
         response_text = self.predict_and_parse(
             problem_description=problem_description, thoughts=thoughts_path, **kwargs
@@ -76,14 +76,14 @@ class ProposePromptStrategy(BaseThoughtGenerationStrategy):
         self,
         problem_description: str,
         thoughts_path: Tuple[str, ...] = (),
-        **kwargs: Any
+        **kwargs: Any,
     ) -> str:
         if thoughts_path not in self.tot_memory or not self.tot_memory[thoughts_path]:
             new_thoughts = self.predict_and_parse(
                 problem_description=problem_description,
                 thoughts=thoughts_path,
                 n=self.c,
-                **kwargs
+                **kwargs,
             )
             if not new_thoughts:
                 return ""
diff --git a/libs/experimental/poetry.lock b/libs/experimental/poetry.lock
index b1173706e43..18c375c686e 100644
--- a/libs/experimental/poetry.lock
+++ b/libs/experimental/poetry.lock
@@ -350,48 +350,6 @@ soupsieve = ">1.2"
 html5lib = ["html5lib"]
 lxml = ["lxml"]
 
-[[package]]
-name = "black"
-version = "23.10.0"
-description = "The uncompromising code formatter."
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.10.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:f8dc7d50d94063cdfd13c82368afd8588bac4ce360e4224ac399e769d6704e98"}, - {file = "black-23.10.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:f20ff03f3fdd2fd4460b4f631663813e57dc277e37fb216463f3b907aa5a9bdd"}, - {file = "black-23.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3d9129ce05b0829730323bdcb00f928a448a124af5acf90aa94d9aba6969604"}, - {file = "black-23.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:960c21555be135c4b37b7018d63d6248bdae8514e5c55b71e994ad37407f45b8"}, - {file = "black-23.10.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:30b78ac9b54cf87bcb9910ee3d499d2bc893afd52495066c49d9ee6b21eee06e"}, - {file = "black-23.10.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:0e232f24a337fed7a82c1185ae46c56c4a6167fb0fe37411b43e876892c76699"}, - {file = "black-23.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31946ec6f9c54ed7ba431c38bc81d758970dd734b96b8e8c2b17a367d7908171"}, - {file = "black-23.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:c870bee76ad5f7a5ea7bd01dc646028d05568d33b0b09b7ecfc8ec0da3f3f39c"}, - {file = "black-23.10.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:6901631b937acbee93c75537e74f69463adaf34379a04eef32425b88aca88a23"}, - {file = "black-23.10.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:481167c60cd3e6b1cb8ef2aac0f76165843a374346aeeaa9d86765fe0dd0318b"}, - {file = "black-23.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74892b4b836e5162aa0452393112a574dac85e13902c57dfbaaf388e4eda37c"}, - {file = "black-23.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:47c4510f70ec2e8f9135ba490811c071419c115e46f143e4dce2ac45afdcf4c9"}, - {file = "black-23.10.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:76baba9281e5e5b230c9b7f83a96daf67a95e919c2dfc240d9e6295eab7b9204"}, - {file = "black-23.10.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:a3c2ddb35f71976a4cfeca558848c2f2f89abc86b06e8dd89b5a65c1e6c0f22a"}, - {file = "black-23.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db451a3363b1e765c172c3fd86213a4ce63fb8524c938ebd82919bf2a6e28c6a"}, - {file = "black-23.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:7fb5fc36bb65160df21498d5a3dd330af8b6401be3f25af60c6ebfe23753f747"}, - {file = "black-23.10.0-py3-none-any.whl", hash = "sha256:e223b731a0e025f8ef427dd79d8cd69c167da807f5710add30cdf131f13dd62e"}, - {file = "black-23.10.0.tar.gz", hash = "sha256:31b9f87b277a68d0e99d2905edae08807c007973eaa609da5f0c62def6b7c0bd"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "bleach" version = "6.1.0" @@ -648,7 +606,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, @@ -1044,7 +1002,7 @@ files = [ {file = 
"greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, + {file = "greenlet-3.0.0-cp311-universal2-macosx_10_9_universal2.whl", hash = "sha256:c3692ecf3fe754c8c0f2c95ff19626584459eab110eaab66413b1e7425cd84e9"}, {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, @@ -1054,6 +1012,7 @@ files = [ {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, + {file = "greenlet-3.0.0-cp312-universal2-macosx_10_9_universal2.whl", hash = "sha256:553d6fb2324e7f4f0899e5ad2c427a4579ed4873f42124beba763f16032959af"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, @@ -1086,7 +1045,6 @@ files = [ {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, ] [package.extras] @@ -1794,16 +1752,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2344,17 +2292,6 @@ files = [ qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] -[[package]] -name = "pathspec" -version = "0.11.2" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, -] - [[package]] name = "pexpect" version = "4.8.0" @@ -2970,7 +2907,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2978,15 +2914,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, 
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3003,7 +2932,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3011,7 +2939,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3448,28 +3375,28 @@ files = [ [[package]] name = "ruff" -version = "0.1.0" +version = "0.1.3" description = "An extremely fast Python linter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.0-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:87114e254dee35e069e1b922d85d4b21a5b61aec759849f393e1dbb308a00439"}, - {file = "ruff-0.1.0-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:764f36d2982cc4a703e69fb73a280b7c539fd74b50c9ee531a4e3fe88152f521"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65f4b7fb539e5cf0f71e9bd74f8ddab74cabdd673c6fb7f17a4dcfd29f126255"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:299fff467a0f163baa282266b310589b21400de0a42d8f68553422fa6bf7ee01"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d412678bf205787263bb702c984012a4f97e460944c072fd7cfa2bd084857c4"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a5391b49b1669b540924640587d8d24128e45be17d1a916b1801d6645e831581"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee8cd57f454cdd77bbcf1e11ff4e0046fb6547cac1922cc6e3583ce4b9c326d1"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7aeed7bc23861a2b38319b636737bf11cfa55d2109620b49cf995663d3e888"}, - {file = "ruff-0.1.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04cd4298b43b16824d9a37800e4c145ba75c29c43ce0d74cad1d66d7ae0a4c5"}, - {file = "ruff-0.1.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7186ccf54707801d91e6314a016d1c7895e21d2e4cd614500d55870ed983aa9f"}, - {file = "ruff-0.1.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d88adfd93849bc62449518228581d132e2023e30ebd2da097f73059900d8dce3"}, - {file = "ruff-0.1.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ad2ccdb3bad5a61013c76a9c1240fdfadf2c7103a2aeebd7bcbbed61f363138f"}, - {file = "ruff-0.1.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b77f6cfa72c6eb19b5cac967cc49762ae14d036db033f7d97a72912770fd8e1c"}, - {file = "ruff-0.1.0-py3-none-win32.whl", hash = "sha256:480bd704e8af1afe3fd444cc52e3c900b936e6ca0baf4fb0281124330b6ceba2"}, - {file = "ruff-0.1.0-py3-none-win_amd64.whl", hash = "sha256:a76ba81860f7ee1f2d5651983f87beb835def94425022dc5f0803108f1b8bfa2"}, - {file = "ruff-0.1.0-py3-none-win_arm64.whl", hash = "sha256:45abdbdab22509a2c6052ecf7050b3f5c7d6b7898dc07e82869401b531d46da4"}, - {file = "ruff-0.1.0.tar.gz", hash = "sha256:ad6b13824714b19c5f8225871cf532afb994470eecb74631cd3500fe817e6b3f"}, + {file = "ruff-0.1.3-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b46d43d51f7061652eeadb426a9e3caa1e0002470229ab2fc19de8a7b0766901"}, + {file = "ruff-0.1.3-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b8afeb9abd26b4029c72adc9921b8363374f4e7edb78385ffaa80278313a15f9"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca3cf365bf32e9ba7e6db3f48a4d3e2c446cd19ebee04f05338bc3910114528b"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4874c165f96c14a00590dcc727a04dca0cfd110334c24b039458c06cf78a672e"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec2dd31eed114e48ea42dbffc443e9b7221976554a504767ceaee3dd38edeb8"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dc3ec4edb3b73f21b4aa51337e16674c752f1d76a4a543af56d7d04e97769613"}, + {file = 
"ruff-0.1.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e3de9ed2e39160800281848ff4670e1698037ca039bda7b9274f849258d26ce"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c595193881922cc0556a90f3af99b1c5681f0c552e7a2a189956141d8666fe8"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f75e670d529aa2288cd00fc0e9b9287603d95e1536d7a7e0cafe00f75e0dd9d"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76dd49f6cd945d82d9d4a9a6622c54a994689d8d7b22fa1322983389b4892e20"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:918b454bc4f8874a616f0d725590277c42949431ceb303950e87fef7a7d94cb3"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d8859605e729cd5e53aa38275568dbbdb4fe882d2ea2714c5453b678dca83784"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0b6c55f5ef8d9dd05b230bb6ab80bc4381ecb60ae56db0330f660ea240cb0d4a"}, + {file = "ruff-0.1.3-py3-none-win32.whl", hash = "sha256:3e7afcbdcfbe3399c34e0f6370c30f6e529193c731b885316c5a09c9e4317eef"}, + {file = "ruff-0.1.3-py3-none-win_amd64.whl", hash = "sha256:7a18df6638cec4a5bd75350639b2bb2a2366e01222825562c7346674bdceb7ea"}, + {file = "ruff-0.1.3-py3-none-win_arm64.whl", hash = "sha256:12fd53696c83a194a2db7f9a46337ce06445fb9aa7d25ea6f293cf75b21aca9f"}, + {file = "ruff-0.1.3.tar.gz", hash = "sha256:3ba6145369a151401d5db79f0a47d50e470384d0d89d0d6f7fab0b589ad07c34"}, ] [[package]] @@ -3609,11 +3536,6 @@ files = [ {file = "scikit_learn-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f66eddfda9d45dd6cadcd706b65669ce1df84b8549875691b1f403730bdef217"}, {file = "scikit_learn-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6448c37741145b241eeac617028ba6ec2119e1339b1385c9720dae31367f2be"}, {file = "scikit_learn-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c413c2c850241998168bbb3bd1bb59ff03b1195a53864f0b80ab092071af6028"}, - {file = "scikit_learn-1.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ef540e09873e31569bc8b02c8a9f745ee04d8e1263255a15c9969f6f5caa627f"}, - {file = "scikit_learn-1.3.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9147a3a4df4d401e618713880be023e36109c85d8569b3bf5377e6cd3fecdeac"}, - {file = "scikit_learn-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2cd3634695ad192bf71645702b3df498bd1e246fc2d529effdb45a06ab028b4"}, - {file = "scikit_learn-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c275a06c5190c5ce00af0acbb61c06374087949f643ef32d355ece12c4db043"}, - {file = "scikit_learn-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:0e1aa8f206d0de814b81b41d60c1ce31f7f2c7354597af38fae46d9c47c45122"}, {file = "scikit_learn-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:52b77cc08bd555969ec5150788ed50276f5ef83abb72e6f469c5b91a0009bbca"}, {file = "scikit_learn-1.3.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a683394bc3f80b7c312c27f9b14ebea7766b1f0a34faf1a2e9158d80e860ec26"}, {file = "scikit_learn-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15d964d9eb181c79c190d3dbc2fff7338786bf017e9039571418a1d53dab236"}, @@ -4957,4 +4879,4 @@ extended-testing = ["faker", "presidio-analyzer", "presidio-anonymizer", "senten [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = 
"f23fcff0e5ab8f172578b4f4da8dd204375357c7a5ad10c97bb77de7208bc889" +content-hash = "f038878d61e8970fa9062263822db5757face25be16a2de7fda24732df86c6f0" diff --git a/libs/experimental/pyproject.toml b/libs/experimental/pyproject.toml index 850449cd63d..17bc009318c 100644 --- a/libs/experimental/pyproject.toml +++ b/libs/experimental/pyproject.toml @@ -18,8 +18,7 @@ vowpal-wabbit-next = {version = "0.6.0", optional = true} sentence-transformers = {version = "^2", optional = true} [tool.poetry.group.lint.dependencies] -ruff = "^0.1" -black = "^23.10.0" +ruff = "^0.1.3" [tool.poetry.group.typing.dependencies] mypy = "^0.991" diff --git a/libs/experimental/tests/unit_tests/rl_chain/test_pick_best_chain_call.py b/libs/experimental/tests/unit_tests/rl_chain/test_pick_best_chain_call.py index 765e52e05eb..6a1bc3fc8ab 100644 --- a/libs/experimental/tests/unit_tests/rl_chain/test_pick_best_chain_call.py +++ b/libs/experimental/tests/unit_tests/rl_chain/test_pick_best_chain_call.py @@ -95,7 +95,8 @@ def test_update_with_delayed_score_with_auto_validator_throws() -> None: assert selection_metadata.selected.score == 3.0 # type: ignore with pytest.raises(RuntimeError): chain.update_with_delayed_score( - chain_response=response, score=100 # type: ignore + chain_response=response, + score=100, # type: ignore ) @@ -121,7 +122,9 @@ def test_update_with_delayed_score_force() -> None: selection_metadata = response["selection_metadata"] # type: ignore assert selection_metadata.selected.score == 3.0 # type: ignore chain.update_with_delayed_score( - chain_response=response, score=100, force_score=True # type: ignore + chain_response=response, + score=100, + force_score=True, # type: ignore ) assert selection_metadata.selected.score == 100.0 # type: ignore diff --git a/libs/langchain/Makefile b/libs/langchain/Makefile index abaa4ced225..6a15cb7c7ab 100644 --- a/libs/langchain/Makefile +++ b/libs/langchain/Makefile @@ -50,11 +50,11 @@ lint lint_diff: ./scripts/check_pydantic.sh . ./scripts/check_imports.sh poetry run ruff . 
- [ "$(PYTHON_FILES)" = "" ] || poetry run black $(PYTHON_FILES) --check + [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff [ "$(PYTHON_FILES)" = "" ] || poetry run mypy $(PYTHON_FILES) format format_diff: - [ "$(PYTHON_FILES)" = "" ] || poetry run black $(PYTHON_FILES) + [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) [ "$(PYTHON_FILES)" = "" ] || poetry run ruff --select I --fix $(PYTHON_FILES) spell_check: diff --git a/libs/langchain/langchain/_api/path.py b/libs/langchain/langchain/_api/path.py index f75848ff59a..0589ae44956 100644 --- a/libs/langchain/langchain/_api/path.py +++ b/libs/langchain/langchain/_api/path.py @@ -22,7 +22,7 @@ def as_import_path( file: Union[Path, str], *, suffix: Optional[str] = None, - relative_to: Path = PACKAGE_DIR + relative_to: Path = PACKAGE_DIR, ) -> str: """Path of the file as a LangChain import exclude langchain top namespace.""" if isinstance(file, str): diff --git a/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py b/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py index 717d3c783b1..fb57fba0b2a 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py +++ b/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py @@ -32,7 +32,7 @@ def create_conversational_retrieval_agent( system_message: Optional[SystemMessage] = None, verbose: bool = False, max_token_limit: int = 2000, - **kwargs: Any + **kwargs: Any, ) -> AgentExecutor: """A convenience method for creating a conversational retrieval agent. @@ -83,5 +83,5 @@ def create_conversational_retrieval_agent( memory=memory, verbose=verbose, return_intermediate_steps=remember_intermediate_steps, - **kwargs + **kwargs, ) diff --git a/libs/langchain/langchain/callbacks/argilla_callback.py b/libs/langchain/langchain/callbacks/argilla_callback.py index 4763a7f11e0..eb2d2da18e6 100644 --- a/libs/langchain/langchain/callbacks/argilla_callback.py +++ b/libs/langchain/langchain/callbacks/argilla_callback.py @@ -284,7 +284,8 @@ class ArgillaCallbackHandler(BaseCallbackHandler): }, } for prompt, output in zip( - prompts, chain_output_val # type: ignore + prompts, # type: ignore + chain_output_val, ) ] ) diff --git a/libs/langchain/langchain/callbacks/flyte_callback.py b/libs/langchain/langchain/callbacks/flyte_callback.py index ad696b81393..fbdf2810e5b 100644 --- a/libs/langchain/langchain/callbacks/flyte_callback.py +++ b/libs/langchain/langchain/callbacks/flyte_callback.py @@ -195,7 +195,9 @@ class FlyteCallbackHandler(BaseMetadataCallbackHandler, BaseCallbackHandler): ) ) - complexity_metrics: Dict[str, float] = generation_resp.pop("text_complexity_metrics") # type: ignore # noqa: E501 + complexity_metrics: Dict[str, float] = generation_resp.pop( + "text_complexity_metrics" + ) # type: ignore # noqa: E501 self.deck.append( self.markdown_renderer().to_html("#### Text Complexity Metrics") ) diff --git a/libs/langchain/langchain/callbacks/manager.py b/libs/langchain/langchain/callbacks/manager.py index 021e7b1879a..da8b73d93be 100644 --- a/libs/langchain/langchain/callbacks/manager.py +++ b/libs/langchain/langchain/callbacks/manager.py @@ -64,20 +64,14 @@ logger = logging.getLogger(__name__) openai_callback_var: ContextVar[Optional[OpenAICallbackHandler]] = ContextVar( "openai_callback", default=None ) -tracing_callback_var: ContextVar[ - Optional[LangChainTracerV1] -] = ContextVar( # noqa: E501 
+tracing_callback_var: ContextVar[Optional[LangChainTracerV1]] = ContextVar( # noqa: E501 "tracing_callback", default=None ) -wandb_tracing_callback_var: ContextVar[ - Optional[WandbTracer] -] = ContextVar( # noqa: E501 +wandb_tracing_callback_var: ContextVar[Optional[WandbTracer]] = ContextVar( # noqa: E501 "tracing_wandb_callback", default=None ) -tracing_v2_callback_var: ContextVar[ - Optional[LangChainTracer] -] = ContextVar( # noqa: E501 +tracing_v2_callback_var: ContextVar[Optional[LangChainTracer]] = ContextVar( # noqa: E501 "tracing_callback_v2", default=None ) run_collector_var: ContextVar[ diff --git a/libs/langchain/langchain/callbacks/mlflow_callback.py b/libs/langchain/langchain/callbacks/mlflow_callback.py index 85ffcfdb06d..9fc48641cdb 100644 --- a/libs/langchain/langchain/callbacks/mlflow_callback.py +++ b/libs/langchain/langchain/callbacks/mlflow_callback.py @@ -371,7 +371,9 @@ class MlflowCallbackHandler(BaseMetadataCallbackHandler, BaseCallbackHandler): nlp=self.nlp, ) ) - complexity_metrics: Dict[str, float] = generation_resp.pop("text_complexity_metrics") # type: ignore # noqa: E501 + complexity_metrics: Dict[str, float] = generation_resp.pop( + "text_complexity_metrics" + ) # type: ignore # noqa: E501 self.mlflg.metrics( complexity_metrics, step=self.metrics["step"], diff --git a/libs/langchain/langchain/callbacks/streaming_stdout.py b/libs/langchain/langchain/callbacks/streaming_stdout.py index fefba70683f..38d1850dd0e 100644 --- a/libs/langchain/langchain/callbacks/streaming_stdout.py +++ b/libs/langchain/langchain/callbacks/streaming_stdout.py @@ -19,7 +19,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler): self, serialized: Dict[str, Any], messages: List[List[BaseMessage]], - **kwargs: Any + **kwargs: Any, ) -> None: """Run when LLM starts running.""" diff --git a/libs/langchain/langchain/callbacks/streaming_stdout_final_only.py b/libs/langchain/langchain/callbacks/streaming_stdout_final_only.py index ba9e7da59f7..6f3593d4d73 100644 --- a/libs/langchain/langchain/callbacks/streaming_stdout_final_only.py +++ b/libs/langchain/langchain/callbacks/streaming_stdout_final_only.py @@ -32,7 +32,7 @@ class FinalStreamingStdOutCallbackHandler(StreamingStdOutCallbackHandler): *, answer_prefix_tokens: Optional[List[str]] = None, strip_tokens: bool = True, - stream_prefix: bool = False + stream_prefix: bool = False, ) -> None: """Instantiate FinalStreamingStdOutCallbackHandler. 
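The `callbacks/manager.py`, `flyte_callback.py`, and `mlflow_callback.py` hunks above all show the same reflow: the formatter keeps a subscripted annotation or call target on a single line and wraps only the call arguments, leaving the existing `# noqa: E501` / `# type: ignore` markers in place. A minimal sketch of that layout, using invented names (`MyTracer`, `my_tracer_var`) rather than anything from the diff:

```python
# Sketch of the annotation reflow applied in callbacks/manager.py above.
# MyTracer and my_tracer_var are illustrative stand-ins.
from contextvars import ContextVar
from typing import Optional


class MyTracer:
    """Stand-in for a tracer implementation."""


# Previously the subscripted annotation was split across several lines;
# the reformatted layout keeps the annotation intact and wraps only the
# ContextVar(...) arguments:
my_tracer_var: ContextVar[Optional[MyTracer]] = ContextVar(
    "my_tracer", default=None
)
```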
diff --git a/libs/langchain/langchain/callbacks/tracers/root_listeners.py b/libs/langchain/langchain/callbacks/tracers/root_listeners.py index be837de1baa..cb065bf8866 100644 --- a/libs/langchain/langchain/callbacks/tracers/root_listeners.py +++ b/libs/langchain/langchain/callbacks/tracers/root_listeners.py @@ -11,7 +11,7 @@ class RootListenersTracer(BaseTracer): *, on_start: Optional[Callable[[Run], None]], on_end: Optional[Callable[[Run], None]], - on_error: Optional[Callable[[Run], None]] + on_error: Optional[Callable[[Run], None]], ) -> None: super().__init__() diff --git a/libs/langchain/langchain/chains/api/openapi/chain.py b/libs/langchain/langchain/chains/api/openapi/chain.py index 1ef5b3c88f3..6db93aefc39 100644 --- a/libs/langchain/langchain/chains/api/openapi/chain.py +++ b/libs/langchain/langchain/chains/api/openapi/chain.py @@ -171,7 +171,7 @@ class OpenAPIEndpointChain(Chain, BaseModel): llm: BaseLanguageModel, requests: Optional[Requests] = None, return_intermediate_steps: bool = False, - **kwargs: Any + **kwargs: Any, # TODO: Handle async ) -> "OpenAPIEndpointChain": """Create an OpenAPIEndpoint from a spec at the specified url.""" @@ -194,7 +194,7 @@ class OpenAPIEndpointChain(Chain, BaseModel): return_intermediate_steps: bool = False, raw_response: bool = False, callbacks: Callbacks = None, - **kwargs: Any + **kwargs: Any, # TODO: Handle async ) -> "OpenAPIEndpointChain": """Create an OpenAPIEndpointChain from an operation and a spec.""" diff --git a/libs/langchain/langchain/chains/openai_functions/tagging.py b/libs/langchain/langchain/chains/openai_functions/tagging.py index e5c2e83016c..c9498a73085 100644 --- a/libs/langchain/langchain/chains/openai_functions/tagging.py +++ b/libs/langchain/langchain/chains/openai_functions/tagging.py @@ -32,7 +32,7 @@ def create_tagging_chain( schema: dict, llm: BaseLanguageModel, prompt: Optional[ChatPromptTemplate] = None, - **kwargs: Any + **kwargs: Any, ) -> Chain: """Creates a chain that extracts information from a passage based on a schema. @@ -62,7 +62,7 @@ def create_tagging_chain_pydantic( pydantic_schema: Any, llm: BaseLanguageModel, prompt: Optional[ChatPromptTemplate] = None, - **kwargs: Any + **kwargs: Any, ) -> Chain: """Creates a chain that extracts information from a passage based on a pydantic schema. 
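The recurring `**kwargs: Any` → `**kwargs: Any,` hunks (in `chains/api/openapi/chain.py` and `chains/openai_functions/tagging.py` here, and in many files above) come from the formatter's magic trailing comma: once a signature or call is already split one argument per line, a trailing comma is added after the last argument. A hedged sketch with an invented function, not code from the diff:

```python
# Illustrative only: create_widget and its parameters are not part of the diff.
from typing import Any, Optional


def create_widget(
    name: str,
    size: Optional[int] = None,
    **kwargs: Any,  # trailing comma added once the signature spans multiple lines
) -> dict:
    """Bundle the arguments into a dict (placeholder behavior)."""
    return {"name": name, "size": size, **kwargs}


# Wrapped call sites get the same trailing comma:
widget = create_widget(
    "gauge",
    size=3,
    color="blue",
)
```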
diff --git a/libs/langchain/langchain/chat_models/promptlayer_openai.py b/libs/langchain/langchain/chat_models/promptlayer_openai.py index 2f29b548e3a..c55e6051c71 100644 --- a/libs/langchain/langchain/chat_models/promptlayer_openai.py +++ b/libs/langchain/langchain/chat_models/promptlayer_openai.py @@ -44,7 +44,7 @@ class PromptLayerChatOpenAI(ChatOpenAI): stop: Optional[List[str]] = None, run_manager: Optional[CallbackManagerForLLMRun] = None, stream: Optional[bool] = None, - **kwargs: Any + **kwargs: Any, ) -> ChatResult: """Call ChatOpenAI generate and then call PromptLayer API to log the request.""" from promptlayer.utils import get_api_key, promptlayer_api_request @@ -86,7 +86,7 @@ class PromptLayerChatOpenAI(ChatOpenAI): stop: Optional[List[str]] = None, run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, stream: Optional[bool] = None, - **kwargs: Any + **kwargs: Any, ) -> ChatResult: """Call ChatOpenAI agenerate and then call PromptLayer to log.""" from promptlayer.utils import get_api_key, promptlayer_api_request_async diff --git a/libs/langchain/langchain/document_loaders/azure_blob_storage_container.py b/libs/langchain/langchain/document_loaders/azure_blob_storage_container.py index da3542a1d85..45126626f73 100644 --- a/libs/langchain/langchain/document_loaders/azure_blob_storage_container.py +++ b/libs/langchain/langchain/document_loaders/azure_blob_storage_container.py @@ -36,7 +36,9 @@ class AzureBlobStorageContainerLoader(BaseLoader): blob_list = container.list_blobs(name_starts_with=self.prefix) for blob in blob_list: loader = AzureBlobStorageFileLoader( - self.conn_str, self.container, blob.name # type: ignore + self.conn_str, + self.container, + blob.name, # type: ignore ) docs.extend(loader.load()) return docs diff --git a/libs/langchain/langchain/document_loaders/blackboard.py b/libs/langchain/langchain/document_loaders/blackboard.py index 18721e8345c..a1690eeb991 100644 --- a/libs/langchain/langchain/document_loaders/blackboard.py +++ b/libs/langchain/langchain/document_loaders/blackboard.py @@ -211,7 +211,9 @@ class BlackboardLoader(WebBaseLoader): """ # Create the document loader loader = DirectoryLoader( - path=self.folder_path, glob="*.pdf", loader_cls=PyPDFLoader # type: ignore + path=self.folder_path, + glob="*.pdf", + loader_cls=PyPDFLoader, # type: ignore ) # Load the documents documents = loader.load() diff --git a/libs/langchain/langchain/embeddings/johnsnowlabs.py b/libs/langchain/langchain/embeddings/johnsnowlabs.py index ead57e52283..494b66bfbd2 100644 --- a/libs/langchain/langchain/embeddings/johnsnowlabs.py +++ b/libs/langchain/langchain/embeddings/johnsnowlabs.py @@ -25,7 +25,7 @@ class JohnSnowLabsEmbeddings(BaseModel, Embeddings): self, model: Any = "embed_sentence.bert", hardware_target: str = "cpu", - **kwargs: Any + **kwargs: Any, ): """Initialize the johnsnowlabs model.""" super().__init__(**kwargs) diff --git a/libs/langchain/langchain/embeddings/localai.py b/libs/langchain/langchain/embeddings/localai.py index f63a6a66a18..53f51d55b80 100644 --- a/libs/langchain/langchain/embeddings/localai.py +++ b/libs/langchain/langchain/embeddings/localai.py @@ -269,9 +269,7 @@ class LocalAIEmbeddings(BaseModel, Embeddings): self, input=[text], **self._invocation_params, - )["data"][ - 0 - ]["embedding"] + )["data"][0]["embedding"] async def _aembedding_func(self, text: str, *, engine: str) -> List[float]: """Call out to LocalAI's embedding endpoint.""" diff --git a/libs/langchain/langchain/embeddings/nlpcloud.py 
b/libs/langchain/langchain/embeddings/nlpcloud.py index 2d558fda2a7..1b698c95861 100644 --- a/libs/langchain/langchain/embeddings/nlpcloud.py +++ b/libs/langchain/langchain/embeddings/nlpcloud.py @@ -26,7 +26,7 @@ class NLPCloudEmbeddings(BaseModel, Embeddings): self, model_name: str = "paraphrase-multilingual-mpnet-base-v2", gpu: bool = False, - **kwargs: Any + **kwargs: Any, ) -> None: super().__init__(model_name=model_name, gpu=gpu, **kwargs) diff --git a/libs/langchain/langchain/embeddings/openai.py b/libs/langchain/langchain/embeddings/openai.py index 274788a1a8c..afd5537ee70 100644 --- a/libs/langchain/langchain/embeddings/openai.py +++ b/libs/langchain/langchain/embeddings/openai.py @@ -393,9 +393,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings): self, input="", **self._invocation_params, - )[ - "data" - ][0]["embedding"] + )["data"][0]["embedding"] else: average = np.average(_result, axis=0, weights=num_tokens_in_batch[i]) embeddings[i] = (average / np.linalg.norm(average)).tolist() diff --git a/libs/langchain/langchain/indexes/vectorstore.py b/libs/langchain/langchain/indexes/vectorstore.py index d15fb7a7341..8e32dc18fdc 100644 --- a/libs/langchain/langchain/indexes/vectorstore.py +++ b/libs/langchain/langchain/indexes/vectorstore.py @@ -34,7 +34,7 @@ class VectorStoreIndexWrapper(BaseModel): question: str, llm: Optional[BaseLanguageModel] = None, retriever_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> str: """Query the vectorstore.""" llm = llm or OpenAI(temperature=0) @@ -49,7 +49,7 @@ class VectorStoreIndexWrapper(BaseModel): question: str, llm: Optional[BaseLanguageModel] = None, retriever_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> dict: """Query the vectorstore and get back sources.""" llm = llm or OpenAI(temperature=0) diff --git a/libs/langchain/langchain/llms/predibase.py b/libs/langchain/langchain/llms/predibase.py index 98ab06937ae..79c758dab3d 100644 --- a/libs/langchain/langchain/llms/predibase.py +++ b/libs/langchain/langchain/llms/predibase.py @@ -25,7 +25,7 @@ class Predibase(LLM): prompt: str, stop: Optional[List[str]] = None, run_manager: Optional[CallbackManagerForLLMRun] = None, - **kwargs: Any + **kwargs: Any, ) -> str: try: from predibase import PredibaseClient diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py index 7d507a6e9ff..538f683fa61 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py @@ -58,7 +58,7 @@ class LLMChainFilter(BaseDocumentCompressor): cls, llm: BaseLanguageModel, prompt: Optional[BasePromptTemplate] = None, - **kwargs: Any + **kwargs: Any, ) -> "LLMChainFilter": """Create a LLMChainFilter from a language model. 
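The `localai.py` and `openai.py` embeddings hunks above fold a chain of trailing subscripts back onto the closing parenthesis instead of splitting each key onto its own line. A small sketch of the resulting shape, using a made-up `embed` helper:

```python
# Made-up helper that mimics the shape of the embeddings responses above.
from typing import Any, Dict, List


def embed(payload: Dict[str, Any]) -> Dict[str, Any]:
    # Return a response-like dict so the subscript chain below resolves.
    return {"data": [{"embedding": [0.1, 0.2, 0.3]}]}


# Trailing subscripts stay attached to the closing parenthesis:
vector: List[float] = embed(
    {"input": "hello"},
)["data"][0]["embedding"]
```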
diff --git a/libs/langchain/langchain/retrievers/tavily_search_api.py b/libs/langchain/langchain/retrievers/tavily_search_api.py index c61613aec82..48f95abb8b0 100644 --- a/libs/langchain/langchain/retrievers/tavily_search_api.py +++ b/libs/langchain/langchain/retrievers/tavily_search_api.py @@ -49,7 +49,7 @@ class TavilySearchAPIRetriever(BaseRetriever): exclude_domains=self.exclude_domains, include_raw_content=self.include_raw_content, include_images=self.include_images, - **self.kwargs + **self.kwargs, ) docs = [ Document( diff --git a/libs/langchain/langchain/schema/messages.py b/libs/langchain/langchain/schema/messages.py index 1edb5d1bd8a..51f9585d636 100644 --- a/libs/langchain/langchain/schema/messages.py +++ b/libs/langchain/langchain/schema/messages.py @@ -241,9 +241,7 @@ class FunctionMessageChunk(FunctionMessage, BaseMessageChunk): # Ignoring mypy re-assignment here since we're overriding the value # to make sure that the chunk variant can be discriminated from the # non-chunk variant. - type: Literal[ - "FunctionMessageChunk" - ] = "FunctionMessageChunk" # type: ignore[assignment] + type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk" # type: ignore[assignment] def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore if isinstance(other, FunctionMessageChunk): diff --git a/libs/langchain/langchain/schema/runnable/base.py b/libs/langchain/langchain/schema/runnable/base.py index 6971bfd68d6..62f4f56d4ad 100644 --- a/libs/langchain/langchain/schema/runnable/base.py +++ b/libs/langchain/langchain/schema/runnable/base.py @@ -933,9 +933,7 @@ class Runnable(Generic[Input, Output], ABC): ) if accepts_run_manager(transformer): kwargs["run_manager"] = run_manager - iterator = transformer( - input_for_transform, **kwargs - ) # type: ignore[call-arg] + iterator = transformer(input_for_transform, **kwargs) # type: ignore[call-arg] for chunk in iterator: yield chunk if final_output_supported: @@ -1011,9 +1009,7 @@ class Runnable(Generic[Input, Output], ABC): ) if accepts_run_manager(transformer): kwargs["run_manager"] = run_manager - iterator = transformer( - input_for_transform, **kwargs - ) # type: ignore[call-arg] + iterator = transformer(input_for_transform, **kwargs) # type: ignore[call-arg] async for chunk in iterator: yield chunk if final_output_supported: diff --git a/libs/langchain/langchain/schema/runnable/branch.py b/libs/langchain/langchain/schema/runnable/branch.py index efbdcfcccbe..c2de60dab0c 100644 --- a/libs/langchain/langchain/schema/runnable/branch.py +++ b/libs/langchain/langchain/schema/runnable/branch.py @@ -86,7 +86,8 @@ class RunnableBranch(RunnableSerializable[Input, Output]): default = branches[-1] if not isinstance( - default, (Runnable, Callable, Mapping) # type: ignore[arg-type] + default, + (Runnable, Callable, Mapping), # type: ignore[arg-type] ): raise TypeError( "RunnableBranch default must be runnable, callable or mapping." 
diff --git a/libs/langchain/langchain/schema/runnable/retry.py b/libs/langchain/langchain/schema/runnable/retry.py index 611075435cd..c0c39a54d9a 100644 --- a/libs/langchain/langchain/schema/runnable/retry.py +++ b/libs/langchain/langchain/schema/runnable/retry.py @@ -89,7 +89,7 @@ class RunnableRetry(RunnableBinding[Input, Output]): input: Input, run_manager: "CallbackManagerForChainRun", config: RunnableConfig, - **kwargs: Any + **kwargs: Any, ) -> Output: for attempt in self._sync_retrying(reraise=True): with attempt: @@ -112,7 +112,7 @@ class RunnableRetry(RunnableBinding[Input, Output]): input: Input, run_manager: "AsyncCallbackManagerForChainRun", config: RunnableConfig, - **kwargs: Any + **kwargs: Any, ) -> Output: async for attempt in self._async_retrying(reraise=True): with attempt: @@ -135,7 +135,7 @@ class RunnableRetry(RunnableBinding[Input, Output]): inputs: List[Input], run_manager: List["CallbackManagerForChainRun"], config: List[RunnableConfig], - **kwargs: Any + **kwargs: Any, ) -> List[Union[Output, Exception]]: results_map: Dict[int, Output] = {} @@ -190,7 +190,7 @@ class RunnableRetry(RunnableBinding[Input, Output]): config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None, *, return_exceptions: bool = False, - **kwargs: Any + **kwargs: Any, ) -> List[Output]: return self._batch_with_config( self._batch, inputs, config, return_exceptions=return_exceptions, **kwargs @@ -201,7 +201,7 @@ class RunnableRetry(RunnableBinding[Input, Output]): inputs: List[Input], run_manager: List["AsyncCallbackManagerForChainRun"], config: List[RunnableConfig], - **kwargs: Any + **kwargs: Any, ) -> List[Union[Output, Exception]]: results_map: Dict[int, Output] = {} @@ -256,7 +256,7 @@ class RunnableRetry(RunnableBinding[Input, Output]): config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None, *, return_exceptions: bool = False, - **kwargs: Any + **kwargs: Any, ) -> List[Output]: return await self._abatch_with_config( self._abatch, inputs, config, return_exceptions=return_exceptions, **kwargs diff --git a/libs/langchain/langchain/smith/evaluation/config.py b/libs/langchain/langchain/smith/evaluation/config.py index cdbcc15de96..8fa57e778fe 100644 --- a/libs/langchain/langchain/smith/evaluation/config.py +++ b/libs/langchain/langchain/smith/evaluation/config.py @@ -322,7 +322,7 @@ class RunEvalConfig(BaseModel): self, criteria: Optional[CRITERIA_TYPE] = None, normalize_by: Optional[float] = None, - **kwargs: Any + **kwargs: Any, ) -> None: super().__init__(criteria=criteria, normalize_by=normalize_by, **kwargs) diff --git a/libs/langchain/langchain/text_splitter.py b/libs/langchain/langchain/text_splitter.py index 721ffab6be6..d8b712b22aa 100644 --- a/libs/langchain/langchain/text_splitter.py +++ b/libs/langchain/langchain/text_splitter.py @@ -412,8 +412,7 @@ class MarkdownHeaderTextSplitter: if stripped_line.startswith(sep) and ( # Header with no text OR header is followed by space # Both are valid conditions that sep is being used a header - len(stripped_line) == len(sep) - or stripped_line[len(sep)] == " " + len(stripped_line) == len(sep) or stripped_line[len(sep)] == " " ): # Ensure we are tracking the header as metadata if name is not None: diff --git a/libs/langchain/langchain/tools/e2b_data_analysis/unparse.py b/libs/langchain/langchain/tools/e2b_data_analysis/unparse.py index 9de05f9fe52..3f2486331f1 100644 --- a/libs/langchain/langchain/tools/e2b_data_analysis/unparse.py +++ b/libs/langchain/langchain/tools/e2b_data_analysis/unparse.py @@ -683,7 +683,7 @@ 
class Unparser: first = False else: self.write(", ") - self.dispatch(a), + (self.dispatch(a),) if d: self.write("=") self.dispatch(d) diff --git a/libs/langchain/langchain/utilities/bing_search.py b/libs/langchain/langchain/utilities/bing_search.py index 75dd964dbb2..33d6063b154 100644 --- a/libs/langchain/langchain/utilities/bing_search.py +++ b/libs/langchain/langchain/utilities/bing_search.py @@ -36,7 +36,9 @@ class BingSearchAPIWrapper(BaseModel): "textFormat": "HTML", } response = requests.get( - self.bing_search_url, headers=headers, params=params # type: ignore + self.bing_search_url, + headers=headers, + params=params, # type: ignore ) response.raise_for_status() search_results = response.json() diff --git a/libs/langchain/langchain/utils/openai_functions.py b/libs/langchain/langchain/utils/openai_functions.py index 0106894a839..05e648539db 100644 --- a/libs/langchain/langchain/utils/openai_functions.py +++ b/libs/langchain/langchain/utils/openai_functions.py @@ -19,7 +19,7 @@ def convert_pydantic_to_openai_function( model: Type[BaseModel], *, name: Optional[str] = None, - description: Optional[str] = None + description: Optional[str] = None, ) -> FunctionDescription: """Converts a Pydantic model to a function description for the OpenAI API.""" schema = dereference_refs(model.schema()) diff --git a/libs/langchain/langchain/vectorstores/azuresearch.py b/libs/langchain/langchain/vectorstores/azuresearch.py index 00abb5904fe..aa083f46856 100644 --- a/libs/langchain/langchain/vectorstores/azuresearch.py +++ b/libs/langchain/langchain/vectorstores/azuresearch.py @@ -116,12 +116,15 @@ def _get_search_client( - set(fields_types.items()) } if len(missing_fields) > 0: - fmt_err = lambda x: ( # noqa: E731 - f"{x} current type: '{fields_types.get(x, 'MISSING')}'. It has to " - f"be '{mandatory_fields.get(x)}' or you can point to a different " - f"'{mandatory_fields.get(x)}' field name by using the env variable " - f"'AZURESEARCH_FIELDS_{x.upper()}'" - ) + # Helper for formatting field information for each missing field. + def fmt_err(x: str) -> str: + return ( + f"{x} current type: '{fields_types.get(x, 'MISSING')}'. " + f"It has to be '{mandatory_fields.get(x)}' or you can point " + f"to a different '{mandatory_fields.get(x)}' field name by " + f"using the env variable 'AZURESEARCH_FIELDS_{x.upper()}'" + ) + error = "\n".join([fmt_err(x) for x in missing_fields]) raise ValueError( f"You need to specify at least the following fields " diff --git a/libs/langchain/langchain/vectorstores/qdrant.py b/libs/langchain/langchain/vectorstores/qdrant.py index 951748ba956..b364c991539 100644 --- a/libs/langchain/langchain/vectorstores/qdrant.py +++ b/libs/langchain/langchain/vectorstores/qdrant.py @@ -1592,9 +1592,7 @@ class Qdrant(VectorStore): f"If you want to recreate the collection, set `force_recreate` " f"parameter to `True`." ) - current_vector_config = current_vector_config.get( - vector_name - ) # type: ignore[assignment] + current_vector_config = current_vector_config.get(vector_name) # type: ignore[assignment] elif isinstance(current_vector_config, dict) and vector_name is None: raise QdrantException( f"Existing Qdrant collection {collection_name} uses named vectors. " @@ -1758,9 +1756,7 @@ class Qdrant(VectorStore): f"If you want to recreate the collection, set `force_recreate` " f"parameter to `True`." 
) - current_vector_config = current_vector_config.get( - vector_name - ) # type: ignore[assignment] + current_vector_config = current_vector_config.get(vector_name) # type: ignore[assignment] elif isinstance(current_vector_config, dict) and vector_name is None: raise QdrantException( f"Existing Qdrant collection {collection_name} uses named vectors. " diff --git a/libs/langchain/langchain/vectorstores/rocksetdb.py b/libs/langchain/langchain/vectorstores/rocksetdb.py index 6c82f4a7f7a..a5fb5bda671 100644 --- a/libs/langchain/langchain/vectorstores/rocksetdb.py +++ b/libs/langchain/langchain/vectorstores/rocksetdb.py @@ -268,20 +268,16 @@ class Rockset(VectorStore): ) for k, v in document.items(): if k == self._text_key: - assert isinstance( - v, str - ), "page content stored in column `{}` must be of type `str`. \ - But found: `{}`".format( - self._text_key, type(v) - ) + assert isinstance(v, str), ( + "page content stored in column `{}` must be of type `str`. " + "But found: `{}`" + ).format(self._text_key, type(v)) page_content = v elif k == "dist": - assert isinstance( - v, float - ), "Computed distance between vectors must of type `float`. \ - But found {}".format( - type(v) - ) + assert isinstance(v, float), ( + "Computed distance between vectors must of type `float`. " + "But found {}" + ).format(type(v)) score = v elif k not in ["_id", "_event_time", "_meta"]: # These columns are populated by Rockset when documents are diff --git a/libs/langchain/langchain/vectorstores/utils.py b/libs/langchain/langchain/vectorstores/utils.py index 64ecf137713..7006379a791 100644 --- a/libs/langchain/langchain/vectorstores/utils.py +++ b/libs/langchain/langchain/vectorstores/utils.py @@ -57,7 +57,7 @@ def maximal_marginal_relevance( def filter_complex_metadata( documents: List[Document], *, - allowed_types: Tuple[Type, ...] = (str, bool, int, float) + allowed_types: Tuple[Type, ...] = (str, bool, int, float), ) -> List[Document]: """Filter out metadata types that are not supported for a vector store.""" updated_documents = [] diff --git a/libs/langchain/poetry.lock b/libs/langchain/poetry.lock index 38f0807dafc..2fa7da10fcc 100644 --- a/libs/langchain/poetry.lock +++ b/libs/langchain/poetry.lock @@ -895,48 +895,6 @@ files = [ [package.dependencies] pyparsing = ">=2.0.3" -[[package]] -name = "black" -version = "23.10.1" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "bleach" version = "6.1.0" @@ -2934,7 +2892,7 @@ files = [ {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, {file = 
"greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, + {file = "greenlet-3.0.0-cp311-universal2-macosx_10_9_universal2.whl", hash = "sha256:c3692ecf3fe754c8c0f2c95ff19626584459eab110eaab66413b1e7425cd84e9"}, {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, @@ -2944,6 +2902,7 @@ files = [ {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, + {file = "greenlet-3.0.0-cp312-universal2-macosx_10_9_universal2.whl", hash = "sha256:553d6fb2324e7f4f0899e5ad2c427a4579ed4873f42124beba763f16032959af"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, @@ -2976,7 +2935,6 @@ files = [ {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, ] [package.extras] @@ -4599,16 +4557,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -6083,17 +6031,6 @@ multiprocess = ">=0.70.15" pox = ">=0.3.3" ppft = ">=1.7.6.7" -[[package]] -name = "pathspec" -version = "0.11.2" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, -] - [[package]] name = "pdfminer-six" version = "20221105" @@ -7711,7 +7648,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -7719,15 +7655,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -7744,7 +7673,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -7752,7 +7680,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -11080,4 +11007,4 @@ text-helpers = ["chardet"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "d9f38367a43153fda9eaea5e8fd03acfc0a06006dfecc3693dffb212249197bc" +content-hash = "1c796fb6c531ed9803594973b7296fba64a22605f602f9646677bc07c5a39a85" diff --git a/libs/langchain/pyproject.toml b/libs/langchain/pyproject.toml index 8dbd32cf42b..cea9139786a 100644 --- a/libs/langchain/pyproject.toml +++ b/libs/langchain/pyproject.toml @@ -193,11 +193,10 @@ tiktoken = "^0.3.2" anthropic = "^0.3.11" [tool.poetry.group.lint.dependencies] -ruff = "^0.1" +ruff = "^0.1.3" types-toml = "^0.10.8.1" types-redis = "^4.3.21.6" types-pytz = "^2023.3.0.0" -black = "^23.10.0" types-chardet = "^5.0.4.6" mypy-protobuf = "^3.0.0" diff --git a/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py b/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py index b0bc62ffa3e..bb7dab0875b 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py @@ -186,7 +186,7 @@ def test_vertexai_args_passed(stop: Optional[str]) -> None: context=None, message_history=[], 
             **prompt_params,
-            stop_sequences=expected_stop_sequence
+            stop_sequences=expected_stop_sequence,
         )
diff --git a/libs/langchain/tests/integration_tests/retrievers/test_zep.py b/libs/langchain/tests/integration_tests/retrievers/test_zep.py
index 621ca0dd8ba..ff6f7e4eacf 100644
--- a/libs/langchain/tests/integration_tests/retrievers/test_zep.py
+++ b/libs/langchain/tests/integration_tests/retrievers/test_zep.py
@@ -101,14 +101,10 @@ def _test_documents(
         assert document.page_content == search_results[i].message.get(  # type: ignore
             "content"
         )
-        assert document.metadata.get("uuid") == search_results[
-            i
-        ].message.get(  # type: ignore
+        assert document.metadata.get("uuid") == search_results[i].message.get(  # type: ignore
             "uuid"
         )
-        assert document.metadata.get("role") == search_results[
-            i
-        ].message.get(  # type: ignore
+        assert document.metadata.get("role") == search_results[i].message.get(  # type: ignore
             "role"
         )
         assert document.metadata.get("score") == search_results[i].dist
diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_elasticsearch.py b/libs/langchain/tests/integration_tests/vectorstores/test_elasticsearch.py
index 8601e8a5cb2..2bca8c4a7b8 100644
--- a/libs/langchain/tests/integration_tests/vectorstores/test_elasticsearch.py
+++ b/libs/langchain/tests/integration_tests/vectorstores/test_elasticsearch.py
@@ -436,7 +436,10 @@ class TestElasticsearch:
         assert mmr_output[1].page_content == texts[1]
 
         mmr_output = docsearch.max_marginal_relevance_search(
-            texts[0], k=2, fetch_k=3, lambda_mult=0.1  # more diversity
+            texts[0],
+            k=2,
+            fetch_k=3,
+            lambda_mult=0.1,  # more diversity
         )
         assert len(mmr_output) == 2
         assert mmr_output[0].page_content == texts[0]
diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_redis.py b/libs/langchain/tests/integration_tests/vectorstores/test_redis.py
index 28d524843ab..4b4cced03cd 100644
--- a/libs/langchain/tests/integration_tests/vectorstores/test_redis.py
+++ b/libs/langchain/tests/integration_tests/vectorstores/test_redis.py
@@ -368,7 +368,10 @@ def test_max_marginal_relevance_search(texts: List[str]) -> None:
     assert mmr_output[1].page_content == texts[1]
 
     mmr_output = docsearch.max_marginal_relevance_search(
-        texts[0], k=2, fetch_k=3, lambda_mult=0.1  # more diversity
+        texts[0],
+        k=2,
+        fetch_k=3,
+        lambda_mult=0.1,  # more diversity
     )
     assert len(mmr_output) == 2
     assert mmr_output[0].page_content == texts[0]
diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_vearch.py b/libs/langchain/tests/integration_tests/vectorstores/test_vearch.py
index e6c70efe3d6..759dffbe1ac 100644
--- a/libs/langchain/tests/integration_tests/vectorstores/test_vearch.py
+++ b/libs/langchain/tests/integration_tests/vectorstores/test_vearch.py
@@ -57,7 +57,10 @@ def test_vearch() -> None:
 def test_vearch_add_texts() -> None:
     """Test end to end adding of texts."""
     texts = [
-        ("Vearch 是一款存储大语言模型数据的向量数据库,用于存储和快速搜索模型embedding后的向量," "可用于基于个人知识库的大模型应用"),
+        (
+            "Vearch 是一款存储大语言模型数据的向量数据库,用于存储和快速搜索模型embedding后的向量,"
+            "可用于基于个人知识库的大模型应用"
+        ),
         "Vearch 支持OpenAI, Llama, ChatGLM等模型,以及LangChain库",
         "vearch 是基于C语言,go语言开发的,并提供python接口,可以直接通过pip安装",
     ]
diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_zep.py b/libs/langchain/tests/integration_tests/vectorstores/test_zep.py
index b424ab9398d..4ed8245b841 100644
--- a/libs/langchain/tests/integration_tests/vectorstores/test_zep.py
+++ b/libs/langchain/tests/integration_tests/vectorstores/test_zep.py
@@ -168,7 +168,7 @@ def test_from_texts(
     vs = zep_vectorstore.from_texts(
         **texts_metadatas,
         collection_name=mock_collection_config.name,
-        api_url="http://localhost:8000"
+        api_url="http://localhost:8000",
     )
 
     vs._collection.add_documents.assert_called_once_with(  # type: ignore
diff --git a/libs/langchain/tests/unit_tests/agents/test_initialize.py b/libs/langchain/tests/unit_tests/agents/test_initialize.py
index 04d3de9e20a..78235329d5e 100644
--- a/libs/langchain/tests/unit_tests/agents/test_initialize.py
+++ b/libs/langchain/tests/unit_tests/agents/test_initialize.py
@@ -16,7 +16,9 @@ def test_initialize_agent_with_str_agent_type() -> None:
     """Test initialize_agent with a string."""
     fake_llm = FakeLLM()
     agent_executor = initialize_agent(
-        [my_tool], fake_llm, "zero-shot-react-description"  # type: ignore
+        [my_tool],  # type: ignore[list-item]
+        fake_llm,
+        "zero-shot-react-description",  # type: ignore[arg-type]
     )
     assert agent_executor.agent._agent_type == AgentType.ZERO_SHOT_REACT_DESCRIPTION
     assert isinstance(agent_executor.tags, list)
diff --git a/libs/langchain/tests/unit_tests/document_loaders/test_detect_encoding.py b/libs/langchain/tests/unit_tests/document_loaders/test_detect_encoding.py
index 67efc11dbef..81b77ceb77b 100644
--- a/libs/langchain/tests/unit_tests/document_loaders/test_detect_encoding.py
+++ b/libs/langchain/tests/unit_tests/document_loaders/test_detect_encoding.py
@@ -47,7 +47,9 @@ def test_loader_detect_encoding_csv() -> None:
         row_count -= 1
 
     loader = DirectoryLoader(
-        str(path), glob="**/*.csv", loader_cls=CSVLoader  # type: ignore
+        str(path),
+        glob="**/*.csv",
+        loader_cls=CSVLoader,  # type: ignore
     )
     loader_detect_encoding = DirectoryLoader(
         str(path),
diff --git a/libs/langchain/tests/unit_tests/prompts/test_chat.py b/libs/langchain/tests/unit_tests/prompts/test_chat.py
index 9ad0ba02588..bbdca9d75ec 100644
--- a/libs/langchain/tests/unit_tests/prompts/test_chat.py
+++ b/libs/langchain/tests/unit_tests/prompts/test_chat.py
@@ -168,9 +168,9 @@ def test_chat_prompt_template_from_messages_using_role_strings() -> None:
 
 
 def test_chat_prompt_template_with_messages() -> None:
-    messages: List[
-        Union[BaseMessagePromptTemplate, BaseMessage]
-    ] = create_messages() + [HumanMessage(content="foo")]
+    messages: List[Union[BaseMessagePromptTemplate, BaseMessage]] = (
+        create_messages() + [HumanMessage(content="foo")]
+    )
     chat_prompt_template = ChatPromptTemplate.from_messages(messages)
     assert sorted(chat_prompt_template.input_variables) == sorted(
         ["context", "foo", "bar"]
diff --git a/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py b/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py
index 583059ccb1f..ae0cab161f7 100644
--- a/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py
+++ b/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py
@@ -617,21 +617,27 @@ def test_lambda_schemas() -> None:
             "byebye": input["yo"],
         }
 
-    assert RunnableLambda(aget_values_typed).input_schema.schema() == {  # type: ignore[arg-type]
-        "title": "RunnableLambdaInput",
-        "$ref": "#/definitions/InputType",
-        "definitions": {
-            "InputType": {
-                "properties": {
-                    "variable_name": {"title": "Variable " "Name", "type": "string"},
-                    "yo": {"title": "Yo", "type": "integer"},
-                },
-                "required": ["variable_name", "yo"],
-                "title": "InputType",
-                "type": "object",
-            }
-        },
-    }
+    assert (
+        RunnableLambda(aget_values_typed).input_schema.schema()  # type: ignore[arg-type]
+        == {
+            "title": "RunnableLambdaInput",
+            "$ref": "#/definitions/InputType",
+            "definitions": {
+                "InputType": {
+                    "properties": {
+                        "variable_name": {
"title": "Variable " "Name", + "type": "string", + }, + "yo": {"title": "Yo", "type": "integer"}, + }, + "required": ["variable_name", "yo"], + "title": "InputType", + "type": "object", + } + }, + } + ) assert RunnableLambda(aget_values_typed).output_schema.schema() == { # type: ignore[arg-type] "title": "RunnableLambdaOutput", @@ -660,10 +666,12 @@ def test_with_types_with_type_generics() -> None: # Try specifying some RunnableLambda(foo).with_types( - output_type=List[int], input_type=List[int] # type: ignore + output_type=List[int], # type: ignore[arg-type] + input_type=List[int], # type: ignore[arg-type] ) RunnableLambda(foo).with_types( - output_type=Sequence[int], input_type=Sequence[int] # type: ignore[arg-type] + output_type=Sequence[int], # type: ignore[arg-type] + input_type=Sequence[int], # type: ignore[arg-type] ) diff --git a/libs/langchain/tests/unit_tests/schema/test_messages.py b/libs/langchain/tests/unit_tests/schema/test_messages.py index 36c22730e87..b72c10010df 100644 --- a/libs/langchain/tests/unit_tests/schema/test_messages.py +++ b/libs/langchain/tests/unit_tests/schema/test_messages.py @@ -15,33 +15,39 @@ def test_message_chunks() -> None: content="I am indeed." ), "MessageChunk + MessageChunk should be a MessageChunk" - assert AIMessageChunk(content="I am") + HumanMessageChunk( - content=" indeed." - ) == AIMessageChunk( - content="I am indeed." + assert ( + AIMessageChunk(content="I am") + HumanMessageChunk(content=" indeed.") + == AIMessageChunk(content="I am indeed.") ), "MessageChunk + MessageChunk should be a MessageChunk of same class as the left side" # noqa: E501 - assert AIMessageChunk( - content="", additional_kwargs={"foo": "bar"} - ) + AIMessageChunk(content="", additional_kwargs={"baz": "foo"}) == AIMessageChunk( - content="", additional_kwargs={"foo": "bar", "baz": "foo"} + assert ( + AIMessageChunk(content="", additional_kwargs={"foo": "bar"}) + + AIMessageChunk(content="", additional_kwargs={"baz": "foo"}) + == AIMessageChunk(content="", additional_kwargs={"foo": "bar", "baz": "foo"}) ), "MessageChunk + MessageChunk should be a MessageChunk with merged additional_kwargs" # noqa: E501 - assert AIMessageChunk( - content="", additional_kwargs={"function_call": {"name": "web_search"}} - ) + AIMessageChunk( - content="", additional_kwargs={"function_call": {"arguments": "{\n"}} - ) + AIMessageChunk( - content="", - additional_kwargs={"function_call": {"arguments": ' "query": "turtles"\n}'}}, - ) == AIMessageChunk( - content="", - additional_kwargs={ - "function_call": { - "name": "web_search", - "arguments": '{\n "query": "turtles"\n}', - } - }, + assert ( + AIMessageChunk( + content="", additional_kwargs={"function_call": {"name": "web_search"}} + ) + + AIMessageChunk( + content="", additional_kwargs={"function_call": {"arguments": "{\n"}} + ) + + AIMessageChunk( + content="", + additional_kwargs={ + "function_call": {"arguments": ' "query": "turtles"\n}'} + }, + ) + == AIMessageChunk( + content="", + additional_kwargs={ + "function_call": { + "name": "web_search", + "arguments": '{\n "query": "turtles"\n}', + } + }, + ) ), "MessageChunk + MessageChunk should be a MessageChunk with merged additional_kwargs" # noqa: E501 @@ -57,10 +63,10 @@ def test_chat_message_chunks() -> None: role="Assistant", content=" indeed." ) - assert ChatMessageChunk(role="User", content="I am") + AIMessageChunk( - content=" indeed." - ) == ChatMessageChunk( - role="User", content="I am indeed." 
+ assert ( + ChatMessageChunk(role="User", content="I am") + + AIMessageChunk(content=" indeed.") + == ChatMessageChunk(role="User", content="I am indeed.") ), "ChatMessageChunk + other MessageChunk should be a ChatMessageChunk with the left side's role" # noqa: E501 assert AIMessageChunk(content="I am") + ChatMessageChunk( diff --git a/libs/langchain/tests/unit_tests/schema/test_output.py b/libs/langchain/tests/unit_tests/schema/test_output.py index 2d19f9370f9..84ec53cdddd 100644 --- a/libs/langchain/tests/unit_tests/schema/test_output.py +++ b/libs/langchain/tests/unit_tests/schema/test_output.py @@ -9,16 +9,19 @@ def test_generation_chunk() -> None: text="Hello, world!" ), "GenerationChunk + GenerationChunk should be a GenerationChunk" - assert GenerationChunk(text="Hello, ") + GenerationChunk( - text="world!", generation_info={"foo": "bar"} - ) == GenerationChunk( - text="Hello, world!", generation_info={"foo": "bar"} + assert ( + GenerationChunk(text="Hello, ") + + GenerationChunk(text="world!", generation_info={"foo": "bar"}) + == GenerationChunk(text="Hello, world!", generation_info={"foo": "bar"}) ), "GenerationChunk + GenerationChunk should be a GenerationChunk with merged generation_info" # noqa: E501 - assert GenerationChunk(text="Hello, ") + GenerationChunk( - text="world!", generation_info={"foo": "bar"} - ) + GenerationChunk(text="!", generation_info={"baz": "foo"}) == GenerationChunk( - text="Hello, world!!", generation_info={"foo": "bar", "baz": "foo"} + assert ( + GenerationChunk(text="Hello, ") + + GenerationChunk(text="world!", generation_info={"foo": "bar"}) + + GenerationChunk(text="!", generation_info={"baz": "foo"}) + == GenerationChunk( + text="Hello, world!!", generation_info={"foo": "bar", "baz": "foo"} + ) ), "GenerationChunk + GenerationChunk should be a GenerationChunk with merged generation_info" # noqa: E501 @@ -31,22 +34,27 @@ def test_chat_generation_chunk() -> None: message=HumanMessageChunk(content="Hello, world!") ), "ChatGenerationChunk + ChatGenerationChunk should be a ChatGenerationChunk" - assert ChatGenerationChunk( - message=HumanMessageChunk(content="Hello, ") - ) + ChatGenerationChunk( - message=HumanMessageChunk(content="world!"), generation_info={"foo": "bar"} - ) == ChatGenerationChunk( - message=HumanMessageChunk(content="Hello, world!"), - generation_info={"foo": "bar"}, + assert ( + ChatGenerationChunk(message=HumanMessageChunk(content="Hello, ")) + + ChatGenerationChunk( + message=HumanMessageChunk(content="world!"), generation_info={"foo": "bar"} + ) + == ChatGenerationChunk( + message=HumanMessageChunk(content="Hello, world!"), + generation_info={"foo": "bar"}, + ) ), "GenerationChunk + GenerationChunk should be a GenerationChunk with merged generation_info" # noqa: E501 - assert ChatGenerationChunk( - message=HumanMessageChunk(content="Hello, ") - ) + ChatGenerationChunk( - message=HumanMessageChunk(content="world!"), generation_info={"foo": "bar"} - ) + ChatGenerationChunk( - message=HumanMessageChunk(content="!"), generation_info={"baz": "foo"} - ) == ChatGenerationChunk( - message=HumanMessageChunk(content="Hello, world!!"), - generation_info={"foo": "bar", "baz": "foo"}, + assert ( + ChatGenerationChunk(message=HumanMessageChunk(content="Hello, ")) + + ChatGenerationChunk( + message=HumanMessageChunk(content="world!"), generation_info={"foo": "bar"} + ) + + ChatGenerationChunk( + message=HumanMessageChunk(content="!"), generation_info={"baz": "foo"} + ) + == ChatGenerationChunk( + message=HumanMessageChunk(content="Hello, 
world!!"), + generation_info={"foo": "bar", "baz": "foo"}, + ) ), "GenerationChunk + GenerationChunk should be a GenerationChunk with merged generation_info" # noqa: E501 diff --git a/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py b/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py index c3f7b4f4434..4974deefb20 100644 --- a/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py +++ b/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py @@ -318,9 +318,7 @@ async def test_arun_on_dataset(monkeypatch: pytest.MonkeyPatch) -> None: ), mock.patch.object(Client, "list_examples", new=mock_list_examples), mock.patch( "langchain.smith.evaluation.runner_utils._arun_llm_or_chain", new=mock_arun_chain, - ), mock.patch.object( - Client, "create_project", new=mock_create_project - ): + ), mock.patch.object(Client, "create_project", new=mock_create_project): client = Client(api_url="http://localhost:1984", api_key="123") chain = mock.MagicMock() chain.input_keys = ["foothing"] diff --git a/poetry.lock b/poetry.lock index 35b6eee659d..01394aef829 100644 --- a/poetry.lock +++ b/poetry.lock @@ -987,7 +987,7 @@ files = [ {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, + {file = "greenlet-3.0.0-cp311-universal2-macosx_10_9_universal2.whl", hash = "sha256:c3692ecf3fe754c8c0f2c95ff19626584459eab110eaab66413b1e7425cd84e9"}, {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, @@ -997,6 +997,7 @@ files = [ {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, + {file = "greenlet-3.0.0-cp312-universal2-macosx_10_9_universal2.whl", hash = "sha256:553d6fb2324e7f4f0899e5ad2c427a4579ed4873f42124beba763f16032959af"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, @@ -1029,7 +1030,6 @@ files 
= [ {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, ] [package.extras] @@ -1627,7 +1627,7 @@ files = [ [[package]] name = "langchain" -version = "0.0.325" +version = "0.0.326" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1649,7 +1649,7 @@ SQLAlchemy = ">=1.4,<3" tenacity = "^8.1.0" [package.extras] -all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.10.1,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] +all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake 
(>=3.8.3,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.10.1,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (>=0,<1)"] clarifai = ["clarifai (>=9.1.0)"] cli = ["typer (>=0.9.0,<0.10.0)"] @@ -1764,16 +1764,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2749,7 +2739,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2757,15 +2746,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2782,7 +2764,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2790,7 +2771,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3915,4 +3895,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "47b6bed43d64a2d4690d4554c78481485822b4b226a6e7f8a456dddb4ce8dd20" +content-hash = "e5c7a1f126afec578c3cd010d3d2131b96f5283a3402af5bb5084408cc7f7444" diff --git a/pyproject.toml b/pyproject.toml index ce052f0042e..3124f90702e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,9 +29,7 @@ sphinx-copybutton = "^0.5.1" nbdoc = "^0.0.82" [tool.poetry.group.lint.dependencies] -ruff = "^0.1" -black = "^23.10.0" - +ruff = "^0.1.3" 
 [tool.poetry.group.codespell.dependencies]
 codespell = "^2.2.0"