Compare commits

...

1 Commits

Author SHA1 Message Date
vowelparrot
9696d9a51c Change Name to LangSmith 2023-06-26 16:57:14 -07:00
11 changed files with 43 additions and 42 deletions

View File

@@ -8,7 +8,7 @@ from datetime import datetime
from typing import Any, Dict, List, Optional, Set, Union
from uuid import UUID
from langchainplus_sdk import LangChainPlusClient
from langsmith import Client as LangSmithClient
from langchain.callbacks.tracers.base import BaseTracer
from langchain.callbacks.tracers.schemas import (
@@ -46,7 +46,7 @@ class LangChainTracer(BaseTracer):
self,
example_id: Optional[Union[UUID, str]] = None,
project_name: Optional[str] = None,
client: Optional[LangChainPlusClient] = None,
client: Optional[LangSmithClient] = None,
**kwargs: Any,
) -> None:
"""Initialize the LangChain tracer."""
@@ -60,7 +60,7 @@ class LangChainTracer(BaseTracer):
)
# set max_workers to 1 to process tasks in order
self.executor = ThreadPoolExecutor(max_workers=1)
self.client = client or LangChainPlusClient()
self.client = client or LangSmithClient()
self._futures: Set[Future] = set()
global _TRACERS
_TRACERS.append(self)

View File

@@ -5,8 +5,8 @@ import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from langchainplus_sdk.schemas import RunBase as BaseRunV2
from langchainplus_sdk.schemas import RunTypeEnum
from langsmith.schemas import RunBase as BaseRunV2
from langsmith.schemas import RunTypeEnum
from pydantic import BaseModel, Field, root_validator
from langchain.schema import LLMResult

View File

@@ -16,8 +16,8 @@ from typing import (
Union,
)
from langchainplus_sdk import LangChainPlusClient
from langchainplus_sdk.schemas import Example
from langsmith import Client as LangSmithClient
from langsmith.schemas import Example
from langchain.base_language import BaseLanguageModel
from langchain.callbacks.base import BaseCallbackHandler
@@ -448,7 +448,7 @@ async def arun_on_dataset(
num_repetitions: int = 1,
project_name: Optional[str] = None,
verbose: bool = False,
client: Optional[LangChainPlusClient] = None,
client: Optional[LangSmithClient] = None,
tags: Optional[List[str]] = None,
) -> Dict[str, Any]:
"""
@@ -474,7 +474,7 @@ async def arun_on_dataset(
Returns:
A dictionary containing the run's project name and the resulting model outputs.
"""
client_ = client or LangChainPlusClient()
client_ = client or LangSmithClient()
project_name = _get_project_name(project_name, llm_or_chain_factory, dataset_name)
dataset = client_.read_dataset(dataset_name=dataset_name)
examples = client_.list_examples(dataset_id=str(dataset.id))
@@ -501,7 +501,7 @@ def run_on_dataset(
num_repetitions: int = 1,
project_name: Optional[str] = None,
verbose: bool = False,
client: Optional[LangChainPlusClient] = None,
client: Optional[LangSmithClient] = None,
tags: Optional[List[str]] = None,
) -> Dict[str, Any]:
"""Run the chain on a dataset and store traces to the specified project name.
@@ -525,7 +525,7 @@ def run_on_dataset(
Returns:
A dictionary containing the run's project name and the resulting model outputs.
"""
client_ = client or LangChainPlusClient()
client_ = client or LangSmithClient()
project_name = _get_project_name(project_name, llm_or_chain_factory, dataset_name)
dataset = client_.read_dataset(dataset_name=dataset_name)
examples = client_.list_examples(dataset_id=str(dataset.id))

View File

@@ -3,8 +3,8 @@ from __future__ import annotations
from abc import abstractmethod
from typing import Any, Dict, List, Optional
from langchainplus_sdk import EvaluationResult, RunEvaluator
from langchainplus_sdk.schemas import Example, Run
from langsmith import EvaluationResult, RunEvaluator
from langsmith.schemas import Example, Run
from langchain.callbacks.manager import (
AsyncCallbackManagerForChainRun,

View File

@@ -1,7 +1,7 @@
from typing import Any, Dict, List, Mapping, Optional, Sequence, Union
from langchainplus_sdk.evaluation import EvaluationResult
from langchainplus_sdk.schemas import Example, Run, RunTypeEnum
from langsmith.evaluation import EvaluationResult
from langsmith.schemas import Example, Run, RunTypeEnum
from pydantic import BaseModel, Field
from langchain.base_language import BaseLanguageModel

View File

@@ -2,7 +2,7 @@
import subprocess
from pathlib import Path
from langchainplus_sdk.cli.main import get_docker_compose_command
from langsmith.cli.main import get_docker_compose_command
def main() -> None:

34
poetry.lock generated
View File

@@ -4360,22 +4360,6 @@ dev = ["black", "pre-commit", "ruff"]
docs = ["mkdocs", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings[python]"]
tests = ["doctest", "pytest", "pytest-mock"]
[[package]]
name = "langchainplus-sdk"
version = "0.0.17"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchainplus_sdk-0.0.17-py3-none-any.whl", hash = "sha256:899675fe850bb0829691ce7643d5c3b4425de1535b6f2d6ce1e5f5457ffb05bf"},
{file = "langchainplus_sdk-0.0.17.tar.gz", hash = "sha256:6520c864a23dcadbe6fb7233a117347f6acc32725a97758e59354704c50de303"},
]
[package.dependencies]
pydantic = ">=1,<2"
requests = ">=2,<3"
tenacity = ">=8.1.0,<9.0.0"
[[package]]
name = "langcodes"
version = "3.3.0"
@@ -4409,6 +4393,22 @@ whylogs = "1.1.45.dev6"
[package.extras]
all = ["datasets (>=2.12.0,<3.0.0)", "nltk (>=3.8.1,<4.0.0)", "openai (>=0.27.6,<0.28.0)", "sentence-transformers (>=2.2.2,<3.0.0)", "torch"]
[[package]]
name = "langsmith"
version = "0.0.1"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langsmith-0.0.1-py3-none-any.whl", hash = "sha256:33003732de7aa2f22c5b002821af1b05456d86f0723ecfc9f238701daec6e45d"},
{file = "langsmith-0.0.1.tar.gz", hash = "sha256:0e5401a358901451e76625b98c618f14c97b1f187b4f38215d049b243a2e11c9"},
]
[package.dependencies]
pydantic = ">=1,<2"
requests = ">=2,<3"
tenacity = ">=8.1.0,<9.0.0"
[[package]]
name = "lark"
version = "1.1.5"
@@ -11771,4 +11771,4 @@ text-helpers = ["chardet"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "6e495e4f58127a5d2001385404b973896e275f5ca71a6ebe856cb114977189d1"
content-hash = "954c1c58503b17e48de75014771ce8ffe70e84577e0a77ed593f2e8ac290960c"

View File

@@ -106,11 +106,12 @@ pyspark = {version = "^3.4.0", optional = true}
clarifai = {version = "9.1.0", optional = true}
tigrisdb = {version = "^1.0.0b6", optional = true}
nebula3-python = {version = "^3.4.0", optional = true}
langchainplus-sdk = ">=0.0.17"
awadb = {version = "^0.3.3", optional = true}
azure-search-documents = {version = "11.4.0a20230509004", source = "azure-sdk-dev", optional = true}
openllm = {version = ">=0.1.6", optional = true}
streamlit = {version = "^1.18.0", optional = true, python = ">=3.8.1,<3.9.7 || >3.9.7,<4.0"}
# TODO: Remove required dep once Run object schema and conversion is fixed for BaseTracer
langsmith = "^0.0.1"
[tool.poetry.group.docs.dependencies]
autodoc_pydantic = "^1.8.0"

View File

@@ -5,8 +5,8 @@ from typing import Any, Dict, List, Optional, Union
from unittest import mock
import pytest
from langchainplus_sdk.client import LangChainPlusClient
from langchainplus_sdk.schemas import Dataset, Example
from langsmith.client import Client as LangSmithClient
from langsmith.schemas import Dataset, Example
from langchain.base_language import BaseLanguageModel
from langchain.chains.base import Chain
@@ -180,15 +180,15 @@ async def test_arun_on_dataset(monkeypatch: pytest.MonkeyPatch) -> None:
pass
with mock.patch.object(
LangChainPlusClient, "read_dataset", new=mock_read_dataset
LangSmithClient, "read_dataset", new=mock_read_dataset
), mock.patch.object(
LangChainPlusClient, "list_examples", new=mock_list_examples
LangSmithClient, "list_examples", new=mock_list_examples
), mock.patch(
"langchain.client.runner_utils._arun_llm_or_chain", new=mock_arun_chain
), mock.patch.object(
LangChainPlusClient, "create_project", new=mock_create_project
LangSmithClient, "create_project", new=mock_create_project
):
client = LangChainPlusClient(api_url="http://localhost:1984", api_key="123")
client = LangSmithClient(api_url="http://localhost:1984", api_key="123")
chain = mock.MagicMock()
num_repetitions = 3
results = await arun_on_dataset(

View File

@@ -3,7 +3,7 @@
from uuid import UUID
import pytest
from langchainplus_sdk.schemas import Example, Run
from langsmith.schemas import Example, Run
from langchain.evaluation.run_evaluators import get_criteria_evaluator, get_qa_evaluator
from tests.unit_tests.llms.fake_llm import FakeLLM

View File

@@ -38,7 +38,7 @@ def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
"aiohttp",
"async-timeout",
"dataclasses-json",
"langchainplus-sdk",
"langsmith",
"numexpr",
"numpy",
"openapi-schema-pydantic",