core[minor]: moved fake llms and embeddings to core (#19226)

- [ ] **PR title**: "core: moved fake llms and embeddings to core"

- [ ] **PR message**:
  - **Description:** Moved the fake LLMs, fake chat models and fake embeddings out of the unit-test helpers and into `langchain_core`, so they can be imported directly from core.
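
A minimal usage sketch of the new import paths (based on the `__init__` exports added in this diff; the illustrative calls below are assumptions for review, not code from the PR):

```python
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.language_models import FakeListLLM

# Deterministic embeddings: the same text always yields the same vector of length `size`.
emb = DeterministicFakeEmbedding(size=8)
assert emb.embed_query("hello") == emb.embed_query("hello")
assert len(emb.embed_query("hello")) == 8

# FakeListLLM replays canned responses in order.
llm = FakeListLLM(responses=["foo", "bar"])
assert llm.invoke("any prompt") == "foo"
```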
Leonid Kuligin 2024-03-18 18:01:26 +01:00 committed by GitHub
parent 514fe80778
commit 366ba77459
24 changed files with 392 additions and 351 deletions

View File

@@ -0,0 +1,4 @@
from langchain_core.embeddings.embeddings import Embeddings
from langchain_core.embeddings.fake import DeterministicFakeEmbedding, FakeEmbeddings

__all__ = ["DeterministicFakeEmbedding", "Embeddings", "FakeEmbeddings"]

View File

@@ -0,0 +1,52 @@
import hashlib
from typing import List

from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel


class FakeEmbeddings(Embeddings, BaseModel):
    """Fake embedding model."""

    size: int
    """The size of the embedding vector."""

    def _get_embedding(self) -> List[float]:
        import numpy as np  # type: ignore[import-not-found, import-untyped]

        return list(np.random.normal(size=self.size))

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        return [self._get_embedding() for _ in texts]

    def embed_query(self, text: str) -> List[float]:
        return self._get_embedding()


class DeterministicFakeEmbedding(Embeddings, BaseModel):
    """
    Fake embedding model that always returns
    the same embedding vector for the same text.
    """

    size: int
    """The size of the embedding vector."""

    def _get_embedding(self, seed: int) -> List[float]:
        import numpy as np  # type: ignore[import-not-found, import-untyped]

        # set the seed for the random generator
        np.random.seed(seed)
        return list(np.random.normal(size=self.size))

    def _get_seed(self, text: str) -> int:
        """
        Get a seed for the random generator, using the hash of the text.
        """
        return int(hashlib.sha256(text.encode("utf-8")).hexdigest(), 16) % 10**8

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        return [self._get_embedding(seed=self._get_seed(_)) for _ in texts]

    def embed_query(self, text: str) -> List[float]:
        return self._get_embedding(seed=self._get_seed(text))
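
The deterministic variant hashes the input text into an integer seed and reseeds NumPy's global RNG before sampling, so identical texts always map to identical vectors while different texts almost certainly do not. The same idea in isolation, as a sketch with a hypothetical helper name rather than code from this PR:

```python
import hashlib
from typing import List

import numpy as np


def fake_vector(text: str, size: int = 10) -> List[float]:
    # hash the text -> integer seed -> reproducible sample from a normal distribution
    seed = int(hashlib.sha256(text.encode("utf-8")).hexdigest(), 16) % 10**8
    np.random.seed(seed)
    return list(np.random.normal(size=size))


assert fake_vector("Hello world!") == fake_vector("Hello world!")
assert fake_vector("Hello world!") != fake_vector("Goodbye world!")
```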

View File

@@ -30,6 +30,13 @@ from langchain_core.language_models.base import (
    get_tokenizer,
)
from langchain_core.language_models.chat_models import BaseChatModel, SimpleChatModel
from langchain_core.language_models.fake import FakeListLLM, FakeStreamingListLLM
from langchain_core.language_models.fake_chat_models import (
    FakeListChatModel,
    FakeMessagesListChatModel,
    GenericFakeChatModel,
    ParrotFakeChatModel,
)
from langchain_core.language_models.llms import LLM, BaseLLM

__all__ = [
@@ -42,4 +49,10 @@ __all__ = [
    "get_tokenizer",
    "LanguageModelOutput",
    "LanguageModelLike",
    "FakeListLLM",
    "FakeStreamingListLLM",
    "FakeListChatModel",
    "FakeMessagesListChatModel",
    "GenericFakeChatModel",
    "ParrotFakeChatModel",
]
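
With these exports in place, downstream test suites can pull the fakes straight from `langchain_core.language_models` instead of `tests.unit_tests.fake.*`. A hedged sketch of typical test usage follows; the `responses` field of `FakeListLLM` appears in the serialized snapshots later in this diff, and `FakeListChatModel(responses=...)` is assumed to mirror it:

```python
from langchain_core.language_models import FakeListChatModel, FakeListLLM

llm = FakeListLLM(responses=["first", "second"])
chat = FakeListChatModel(responses=["pong"])  # assumed: replays canned chat responses

assert llm.invoke("ping") == "first"   # canned responses are replayed in order
assert llm.invoke("ping") == "second"
assert chat.invoke("ping").content == "pong"
```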

View File

@ -2,11 +2,12 @@ import asyncio
import time
from typing import Any, AsyncIterator, Iterator, List, Mapping, Optional
from langchain_core.callbacks.manager import (
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain_core.language_models import LLM, LanguageModelInput
from langchain_core.language_models import LanguageModelInput
from langchain_core.language_models.llms import LLM
from langchain_core.runnables import RunnableConfig

View File

@ -1,19 +1,15 @@
"""Fake Chat Model wrapper for testing purposes."""
"""Fake ChatModel for testing purposes."""
import asyncio
import re
import time
from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union, cast
from langchain_core.callbacks.manager import (
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import BaseChatModel, SimpleChatModel
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
BaseMessage,
)
from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.runnables import run_in_executor

libs/core/poetry.lock (generated, 109 changed lines)
View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "annotated-types"
@ -679,13 +679,13 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs
[[package]]
name = "importlib-resources"
version = "6.1.3"
version = "6.3.1"
description = "Read resources from Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "importlib_resources-6.1.3-py3-none-any.whl", hash = "sha256:4c0269e3580fe2634d364b39b38b961540a7738c02cb984e98add8b4221d793d"},
{file = "importlib_resources-6.1.3.tar.gz", hash = "sha256:56fb4525197b78544a3354ea27793952ab93f935bb4bf746b846bb1015020f2b"},
{file = "importlib_resources-6.3.1-py3-none-any.whl", hash = "sha256:4811639ca7fa830abdb8e9ca0a104dc6ad13de691d9fe0d3173a71304f068159"},
{file = "importlib_resources-6.3.1.tar.gz", hash = "sha256:29a3d16556e330c3c8fb8202118c5ff41241cc34cbfb25989bbad226d99b7995"},
]
[package.dependencies]
@ -851,18 +851,15 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "json5"
version = "0.9.22"
version = "0.9.24"
description = "A Python implementation of the JSON5 data format."
optional = false
python-versions = ">=3.8"
files = [
{file = "json5-0.9.22-py3-none-any.whl", hash = "sha256:6621007c70897652f8b5d03885f732771c48d1925591ad989aa80c7e0e5ad32f"},
{file = "json5-0.9.22.tar.gz", hash = "sha256:b729bde7650b2196a35903a597d2b704b8fdf8648bfb67368cfb79f1174a17bd"},
{file = "json5-0.9.24-py3-none-any.whl", hash = "sha256:4ca101fd5c7cb47960c055ef8f4d0e31e15a7c6c48c3b6f1473fc83b6c462a13"},
{file = "json5-0.9.24.tar.gz", hash = "sha256:0c638399421da959a20952782800e5c1a78c14e08e1dc9738fa10d8ec14d58c8"},
]
[package.extras]
dev = ["hypothesis"]
[[package]]
name = "jsonpatch"
version = "1.33"
@ -1118,13 +1115,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>
[[package]]
name = "jupyterlab"
version = "4.1.4"
version = "4.1.5"
description = "JupyterLab computational environment"
optional = false
python-versions = ">=3.8"
files = [
{file = "jupyterlab-4.1.4-py3-none-any.whl", hash = "sha256:f92c3f2b12b88efcf767205f49be9b2f86b85544f9c4f342bb5e9904a16cf931"},
{file = "jupyterlab-4.1.4.tar.gz", hash = "sha256:e03c82c124ad8a0892e498b9dde79c50868b2c267819aca3f55ce47c57ebeb1d"},
{file = "jupyterlab-4.1.5-py3-none-any.whl", hash = "sha256:3bc843382a25e1ab7bc31d9e39295a9f0463626692b7995597709c0ab236ab2c"},
{file = "jupyterlab-4.1.5.tar.gz", hash = "sha256:c9ad75290cb10bfaff3624bf3fbb852319b4cce4c456613f8ebbaa98d03524db"},
]
[package.dependencies]
@ -1219,13 +1216,13 @@ url = "../text-splitters"
[[package]]
name = "langsmith"
version = "0.1.23"
version = "0.1.27"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langsmith-0.1.23-py3-none-any.whl", hash = "sha256:69984268b9867cb31b875965b3f86b6f56ba17dd5454d487d3a1a999bdaeea69"},
{file = "langsmith-0.1.23.tar.gz", hash = "sha256:327c66ec0de8c1bc57bfa47bbc70a29ef749e97c3e5571b9baf754d1e0644220"},
{file = "langsmith-0.1.27-py3-none-any.whl", hash = "sha256:d223176952b1525c958189ab1b894f5bd9891ec9177222f7a978aeee4bf1cc95"},
{file = "langsmith-0.1.27.tar.gz", hash = "sha256:e0a339d976362051adf3fdbc43fcc7c00bb4615a401321ad7e556bd2dab556c0"},
]
[package.dependencies]
@ -1387,13 +1384,13 @@ files = [
[[package]]
name = "nbclient"
version = "0.9.1"
version = "0.10.0"
description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor."
optional = false
python-versions = ">=3.8.0"
files = [
{file = "nbclient-0.9.1-py3-none-any.whl", hash = "sha256:2c50a866e8dd6c5f655de47d2e252c82d2ebe978574e760ac229f5950593a434"},
{file = "nbclient-0.9.1.tar.gz", hash = "sha256:4f7b78c6c2a380e228f8a3bb469b847cb24e5b8ad6fda410691b5621e05ce5a2"},
{file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"},
{file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"},
]
[package.dependencies]
@ -1447,13 +1444,13 @@ webpdf = ["playwright"]
[[package]]
name = "nbformat"
version = "5.10.2"
version = "5.10.3"
description = "The Jupyter Notebook format"
optional = false
python-versions = ">=3.8"
files = [
{file = "nbformat-5.10.2-py3-none-any.whl", hash = "sha256:7381189a0d537586b3f18bae5dbad347d7dd0a7cf0276b09cdcd5c24d38edd99"},
{file = "nbformat-5.10.2.tar.gz", hash = "sha256:c535b20a0d4310167bf4d12ad31eccfb0dc61e6392d6f8c570ab5b45a06a49a3"},
{file = "nbformat-5.10.3-py3-none-any.whl", hash = "sha256:d9476ca28676799af85385f409b49d95e199951477a159a576ef2a675151e5e8"},
{file = "nbformat-5.10.3.tar.gz", hash = "sha256:60ed5e910ef7c6264b87d644f276b1b49e24011930deef54605188ddeb211685"},
]
[package.dependencies]
@ -1479,13 +1476,13 @@ files = [
[[package]]
name = "notebook"
version = "7.1.1"
version = "7.1.2"
description = "Jupyter Notebook - A web-based notebook environment for interactive computing"
optional = false
python-versions = ">=3.8"
files = [
{file = "notebook-7.1.1-py3-none-any.whl", hash = "sha256:197d8e0595acabf4005851c8716e952a81b405f7aefb648067a761fbde267ce7"},
{file = "notebook-7.1.1.tar.gz", hash = "sha256:818e7420fa21f402e726afb9f02df7f3c10f294c02e383ed19852866c316108b"},
{file = "notebook-7.1.2-py3-none-any.whl", hash = "sha256:fc6c24b9aef18d0cd57157c9c47e95833b9b0bdc599652639acf0bdb61dc7d5f"},
{file = "notebook-7.1.2.tar.gz", hash = "sha256:efc2c80043909e0faa17fce9e9b37c059c03af0ec99a4d4db84cb21d9d2e936a"},
]
[package.dependencies]
@ -1517,6 +1514,43 @@ jupyter-server = ">=1.8,<3"
[package.extras]
test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"]
[[package]]
name = "numpy"
version = "1.24.4"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"},
{file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"},
{file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"},
{file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"},
{file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"},
{file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"},
{file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"},
{file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"},
{file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"},
{file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"},
{file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"},
{file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"},
{file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"},
{file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"},
{file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"},
{file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"},
{file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"},
{file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"},
{file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"},
{file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"},
{file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"},
{file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"},
{file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"},
{file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"},
{file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"},
{file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"},
{file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"},
{file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"},
]
[[package]]
name = "orjson"
version = "3.9.15"
@ -2111,7 +2145,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@ -2295,13 +2328,13 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"]
[[package]]
name = "referencing"
version = "0.33.0"
version = "0.34.0"
description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"},
{file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"},
{file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"},
{file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"},
]
[package.dependencies]
@ -2731,13 +2764,13 @@ files = [
[[package]]
name = "types-python-dateutil"
version = "2.8.19.20240311"
version = "2.9.0.20240316"
description = "Typing stubs for python-dateutil"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-python-dateutil-2.8.19.20240311.tar.gz", hash = "sha256:51178227bbd4cbec35dc9adffbf59d832f20e09842d7dcb8c73b169b8780b7cb"},
{file = "types_python_dateutil-2.8.19.20240311-py3-none-any.whl", hash = "sha256:ef813da0809aca76472ca88807addbeea98b19339aebe56159ae2f4b4f70857a"},
{file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"},
{file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"},
]
[[package]]
@ -2914,18 +2947,18 @@ files = [
[[package]]
name = "zipp"
version = "3.17.0"
version = "3.18.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
{file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
{file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
{file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[extras]
extended-testing = ["jinja2"]
@ -2933,4 +2966,4 @@ extended-testing = ["jinja2"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "9d6e9c9613b31dbbe35772bf8d8d5aaba637228de7abbf4a7b271971c2a81ba9"
content-hash = "ca611429e3dd84ce6dac7ef69d7d9b4da78bf467356946e37016b821e5fe752e"

View File

@ -60,6 +60,7 @@ pytest-asyncio = "^0.21.1"
grandalf = "^0.8"
pytest-profiling = "^1.7.0"
responses = "^0.25.0"
numpy = "^1.24.0"
[tool.poetry.group.test_integration]

View File

@@ -0,0 +1,16 @@
from langchain_core.embeddings import DeterministicFakeEmbedding


def test_deterministic_fake_embeddings() -> None:
    """
    Test that the deterministic fake embeddings return the same
    embedding vector for the same text.
    """
    fake = DeterministicFakeEmbedding(size=10)
    text = "Hello world!"
    assert fake.embed_query(text) == fake.embed_query(text)
    assert fake.embed_query(text) != fake.embed_query("Goodbye world!")
    assert fake.embed_documents([text, text]) == fake.embed_documents([text, text])
    assert fake.embed_documents([text, text]) != fake.embed_documents(
        [text, "Goodbye world!"]
    )
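
A companion check for the non-deterministic `FakeEmbeddings` is not part of this PR; a sketch of what such a test could look like, assuming only the `size` field shown above:

```python
from langchain_core.embeddings import FakeEmbeddings


def test_fake_embeddings_shape() -> None:
    fake = FakeEmbeddings(size=10)
    # one vector per input document, each with the configured dimensionality
    assert len(fake.embed_query("Hello world!")) == 10
    assert len(fake.embed_documents(["Hello", "world"])) == 2
    assert all(len(vec) == 10 for vec in fake.embed_documents(["Hello", "world"]))
```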

View File

@ -4,10 +4,10 @@ from typing import Any, Dict, List, Optional, Union
from uuid import UUID
from langchain_core.callbacks.base import AsyncCallbackHandler
from langchain_core.language_models import GenericFakeChatModel, ParrotFakeChatModel
from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
from langchain_core.messages.human import HumanMessage
from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
from tests.unit_tests.fake.chat_model import GenericFakeChatModel, ParrotFakeChatModel
def test_generic_fake_chat_model_invoke() -> None:

View File

@ -5,7 +5,7 @@ from typing import Any, AsyncIterator, Iterator, List, Optional
import pytest
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import BaseChatModel
from langchain_core.language_models import BaseChatModel, FakeListChatModel
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
@ -21,7 +21,6 @@ from tests.unit_tests.fake.callbacks import (
FakeAsyncCallbackHandler,
FakeCallbackHandler,
)
from tests.unit_tests.fake.chat_model import FakeListChatModel
@pytest.fixture

View File

@ -6,7 +6,7 @@ from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain_core.language_models.llms import BaseLLM
from langchain_core.language_models import BaseLLM, FakeListLLM, FakeStreamingListLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.tracers.context import collect_runs
from tests.unit_tests.fake.callbacks import (
@ -14,7 +14,6 @@ from tests.unit_tests.fake.callbacks import (
FakeAsyncCallbackHandler,
FakeCallbackHandler,
)
from tests.unit_tests.fake.llm import FakeListLLM, FakeStreamingListLLM
def test_batch() -> None:

View File

@ -11,6 +11,12 @@ EXPECTED_ALL = [
"LanguageModelLike",
"get_tokenizer",
"LanguageModelLike",
"FakeMessagesListChatModel",
"FakeListChatModel",
"GenericFakeChatModel",
"FakeStreamingListLLM",
"FakeListLLM",
"ParrotFakeChatModel",
]

View File

@ -2,13 +2,13 @@
from typing import List
from langchain_core.exceptions import OutputParserException
from langchain_core.language_models import GenericFakeChatModel
from langchain_core.messages import AIMessage
from langchain_core.output_parsers import (
BaseGenerationOutputParser,
BaseTransformOutputParser,
)
from langchain_core.outputs import ChatGeneration, Generation
from tests.unit_tests.fake.chat_model import GenericFakeChatModel
def test_base_generation_parser() -> None:

View File

@ -2,12 +2,12 @@ from functools import partial
from inspect import isclass
from typing import Any, Dict, Type, Union, cast
from langchain_core.language_models import FakeListChatModel
from langchain_core.load.dump import dumps
from langchain_core.load.load import loads
from langchain_core.prompts.structured import StructuredPrompt
from langchain_core.pydantic_v1 import BaseModel
from langchain_core.runnables.base import Runnable, RunnableLambda
from tests.unit_tests.fake.chat_model import FakeListChatModel
def _fake_runnable(

View File

@ -606,10 +606,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['foo'], i=1)",
@ -1083,10 +1082,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -1150,10 +1148,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -1694,10 +1691,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['bar'])",
@ -2171,10 +2167,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -2238,10 +2233,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -2713,10 +2707,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['foo'], i=1)",
@ -3190,10 +3183,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -3225,10 +3217,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['bar'])",
@ -3702,10 +3693,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -4263,10 +4253,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['foo'], i=1)",
@ -4740,10 +4729,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -4775,10 +4763,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['baz'], i=1)",
@ -5252,10 +5239,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"
@ -5286,10 +5272,9 @@
"lc": 1,
"type": "not_implemented",
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"repr": "FakeListLLM(responses=['bar'])",
@ -5763,10 +5748,9 @@
"type": "runnable",
"data": {
"id": [
"tests",
"unit_tests",
"langchain_core",
"language_models",
"fake",
"llm",
"FakeListLLM"
],
"name": "FakeListLLM"

File diff suppressed because one or more lines are too long

View File

@ -3,13 +3,13 @@ from typing import Any, Callable, List, NamedTuple, Union
import pytest
from langchain_core.beta.runnables.context import Context
from langchain_core.language_models import FakeListLLM, FakeStreamingListLLM
from langchain_core.output_parsers.string import StrOutputParser
from langchain_core.prompt_values import StringPromptValue
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.runnables.base import Runnable, RunnableLambda
from langchain_core.runnables.passthrough import RunnablePassthrough
from langchain_core.runnables.utils import aadd, add
from tests.unit_tests.fake.llm import FakeListLLM, FakeStreamingListLLM
class _TestCase(NamedTuple):

View File

@ -4,6 +4,7 @@ from typing import Any, AsyncIterator, Iterator
import pytest
from syrupy import SnapshotAssertion
from langchain_core.language_models import FakeListLLM
from langchain_core.load import dumps
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import (
@ -14,7 +15,6 @@ from langchain_core.runnables import (
RunnablePassthrough,
RunnableWithFallbacks,
)
from tests.unit_tests.fake.llm import FakeListLLM
@pytest.fixture()

View File

@ -1,11 +1,11 @@
from syrupy import SnapshotAssertion
from langchain_core.language_models import FakeListLLM
from langchain_core.output_parsers.list import CommaSeparatedListOutputParser
from langchain_core.output_parsers.string import StrOutputParser
from langchain_core.output_parsers.xml import XMLOutputParser
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.runnables.base import Runnable
from tests.unit_tests.fake.llm import FakeListLLM
def test_graph_single_runnable(snapshot: SnapshotAssertion) -> None:
@ -54,7 +54,7 @@ def test_graph_sequence(snapshot: SnapshotAssertion) -> None:
"id": 2,
"type": "runnable",
"data": {
"id": ["tests", "unit_tests", "fake", "llm", "FakeListLLM"],
"id": ["langchain_core", "language_models", "fake", "FakeListLLM"],
"name": "FakeListLLM",
},
},
@ -136,7 +136,7 @@ def test_graph_sequence_map(snapshot: SnapshotAssertion) -> None:
"id": 2,
"type": "runnable",
"data": {
"id": ["tests", "unit_tests", "fake", "llm", "FakeListLLM"],
"id": ["langchain_core", "language_models", "fake", "FakeListLLM"],
"name": "FakeListLLM",
},
},

View File

@ -28,6 +28,11 @@ from langchain_core.callbacks.manager import (
trace_as_chain_group,
)
from langchain_core.documents import Document
from langchain_core.language_models import (
FakeListChatModel,
FakeListLLM,
FakeStreamingListLLM,
)
from langchain_core.load import dumpd, dumps
from langchain_core.messages import (
AIMessage,
@ -80,8 +85,6 @@ from langchain_core.tracers import (
RunLogPatch,
)
from langchain_core.tracers.context import collect_runs
from tests.unit_tests.fake.chat_model import FakeListChatModel
from tests.unit_tests.fake.llm import FakeListLLM, FakeStreamingListLLM
class FakeTracer(BaseTracer):

View File

@ -7,6 +7,7 @@ import pytest
from langchain_core.callbacks import CallbackManagerForRetrieverRun, Callbacks
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.documents import Document
from langchain_core.language_models import FakeStreamingListLLM, GenericFakeChatModel
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
@ -26,8 +27,6 @@ from langchain_core.runnables import (
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.runnables.schema import StreamEvent
from langchain_core.tools import tool
from tests.unit_tests.fake.chat_model import GenericFakeChatModel
from tests.unit_tests.fake.llm import FakeStreamingListLLM
def _with_nulled_run_id(events: Sequence[StreamEvent]) -> List[StreamEvent]:

View File

@ -2,8 +2,8 @@
import uuid
from langchain_core.language_models import FakeListLLM
from langchain_core.tracers.context import collect_runs
from tests.unit_tests.fake.llm import FakeListLLM
def test_collect_runs() -> None: