From e8ee781a422ee33509c1ac1003c2d31ecd083ef1 Mon Sep 17 00:00:00 2001
From: Erick Friis
Date: Fri, 12 Jul 2024 23:14:24 +0200
Subject: [PATCH] ibm: move to external repo (#24208)

---
 libs/partners/ibm/.gitignore | 1 -
 libs/partners/ibm/LICENSE | 21 -
 libs/partners/ibm/Makefile | 58 -
 libs/partners/ibm/README.md | 145 +--
 libs/partners/ibm/langchain_ibm/__init__.py | 5 -
 .../partners/ibm/langchain_ibm/chat_models.py | 915 -------------
 libs/partners/ibm/langchain_ibm/embeddings.py | 173 ---
 libs/partners/ibm/langchain_ibm/llms.py | 428 ------
 libs/partners/ibm/langchain_ibm/py.typed | 0
 libs/partners/ibm/poetry.lock | 1150 -----------------
 libs/partners/ibm/pyproject.toml | 81 --
 libs/partners/ibm/scripts/check_imports.py | 17 -
 libs/partners/ibm/scripts/check_pydantic.sh | 27 -
 libs/partners/ibm/scripts/lint_imports.sh | 17 -
 libs/partners/ibm/tests/__init__.py | 0
 .../ibm/tests/integration_tests/__init__.py | 0
 .../integration_tests/test_chat_models.py | 243 ----
 .../tests/integration_tests/test_compile.py | 7 -
 .../integration_tests/test_embeddings.py | 73 --
 .../ibm/tests/integration_tests/test_llms.py | 292 -----
 .../partners/ibm/tests/unit_tests/__init__.py | 0
 .../ibm/tests/unit_tests/test_chat_models.py | 62 -
 .../ibm/tests/unit_tests/test_embeddings.py | 62 -
 .../ibm/tests/unit_tests/test_imports.py | 7 -
 .../ibm/tests/unit_tests/test_llms.py | 60 -
 25 files changed, 2 insertions(+), 3842 deletions(-)
 delete mode 100644 libs/partners/ibm/.gitignore
 delete mode 100644 libs/partners/ibm/LICENSE
 delete mode 100644 libs/partners/ibm/Makefile
 delete mode 100644 libs/partners/ibm/langchain_ibm/__init__.py
 delete mode 100644 libs/partners/ibm/langchain_ibm/chat_models.py
 delete mode 100644 libs/partners/ibm/langchain_ibm/embeddings.py
 delete mode 100644 libs/partners/ibm/langchain_ibm/llms.py
 delete mode 100644 libs/partners/ibm/langchain_ibm/py.typed
 delete mode 100644 libs/partners/ibm/poetry.lock
 delete mode 100644 libs/partners/ibm/pyproject.toml
 delete mode 100644 libs/partners/ibm/scripts/check_imports.py
 delete mode 100755 libs/partners/ibm/scripts/check_pydantic.sh
 delete mode 100755 libs/partners/ibm/scripts/lint_imports.sh
 delete mode 100644 libs/partners/ibm/tests/__init__.py
 delete mode 100644 libs/partners/ibm/tests/integration_tests/__init__.py
 delete mode 100644 libs/partners/ibm/tests/integration_tests/test_chat_models.py
 delete mode 100644 libs/partners/ibm/tests/integration_tests/test_compile.py
 delete mode 100644 libs/partners/ibm/tests/integration_tests/test_embeddings.py
 delete mode 100644 libs/partners/ibm/tests/integration_tests/test_llms.py
 delete mode 100644 libs/partners/ibm/tests/unit_tests/__init__.py
 delete mode 100644 libs/partners/ibm/tests/unit_tests/test_chat_models.py
 delete mode 100644 libs/partners/ibm/tests/unit_tests/test_embeddings.py
 delete mode 100644 libs/partners/ibm/tests/unit_tests/test_imports.py
 delete mode 100644 libs/partners/ibm/tests/unit_tests/test_llms.py

diff --git a/libs/partners/ibm/.gitignore b/libs/partners/ibm/.gitignore
deleted file mode 100644
index bee8a64b79a..00000000000
--- a/libs/partners/ibm/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-__pycache__
diff --git a/libs/partners/ibm/LICENSE b/libs/partners/ibm/LICENSE
deleted file mode 100644
index 426b6509034..00000000000
--- a/libs/partners/ibm/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 LangChain, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/libs/partners/ibm/Makefile b/libs/partners/ibm/Makefile
deleted file mode 100644
index f03e5e4c5e7..00000000000
--- a/libs/partners/ibm/Makefile
+++ /dev/null
@@ -1,58 +0,0 @@
-.PHONY: all format lint test tests integration_tests docker_tests help extended_tests
-
-# Default target executed when no arguments are given to make.
-all: help
-
-# Define a variable for the test file path.
-TEST_FILE ?= tests/unit_tests/
-
-integration_test integration_tests: TEST_FILE=tests/integration_tests/
-
-test tests integration_test integration_tests:
-	poetry run pytest $(TEST_FILE)
-
-
-######################
-# LINTING AND FORMATTING
-######################
-
-# Define a variable for Python and notebook files.
-PYTHON_FILES=.
-MYPY_CACHE=.mypy_cache
-lint format: PYTHON_FILES=.
-lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/ibm --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$')
-lint_package: PYTHON_FILES=langchain_ibm
-lint_tests: PYTHON_FILES=tests
-lint_tests: MYPY_CACHE=.mypy_cache_test
-
-lint lint_diff lint_package lint_tests:
-	poetry run ruff check .
-	poetry run ruff format $(PYTHON_FILES) --diff
-	poetry run ruff check --select I $(PYTHON_FILES)
-	mkdir $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
-
-format format_diff:
-	poetry run ruff format $(PYTHON_FILES)
-	poetry run ruff check --select I --fix $(PYTHON_FILES)
-
-spell_check:
-	poetry run codespell --toml pyproject.toml
-
-spell_fix:
-	poetry run codespell --toml pyproject.toml -w
-
-check_imports: $(shell find langchain_ibm -name '*.py')
-	poetry run python ./scripts/check_imports.py $^
-
-######################
-# HELP
-######################
-
-help:
-	@echo '----'
-	@echo 'check_imports - check imports'
-	@echo 'format - run code formatters'
-	@echo 'lint - run linters'
-	@echo 'test - run unit tests'
-	@echo 'tests - run unit tests'
-	@echo 'test TEST_FILE=<test_file> - run all tests in file'
diff --git a/libs/partners/ibm/README.md b/libs/partners/ibm/README.md
index b225d40491a..91cdbedf6c1 100644
--- a/libs/partners/ibm/README.md
+++ b/libs/partners/ibm/README.md
@@ -1,144 +1,3 @@
-# langchain-ibm
+This package has moved!
-
-This package provides the integration between LangChain and IBM watsonx.ai through the `ibm-watsonx-ai` SDK.
-
-## Installation
-
-To use the `langchain-ibm` package, follow these installation steps:
-
-```bash
-pip install langchain-ibm
-```
-
-## Usage
-
-### Setting up
-
-To use IBM's models, you must have an IBM Cloud user API key. Here's how to obtain and set up your API key:
-
-1. **Obtain an API Key:** For more details on how to create and manage an API key, refer to IBM's [documentation](https://cloud.ibm.com/docs/account?topic=account-userapikey&interface=ui).
-2. **Set the API Key as an Environment Variable:** For security reasons, it's recommended not to hard-code your API key directly in your scripts. Instead, set it up as an environment variable. You can use the following code to prompt for the API key and set it as an environment variable:
-
-```python
-import os
-from getpass import getpass
-
-watsonx_api_key = getpass()
-os.environ["WATSONX_APIKEY"] = watsonx_api_key
-```
-
-Alternatively, you can set the environment variable in your terminal.
-
-- **Linux/macOS:** Open your terminal and execute the following command:
-  ```bash
-  export WATSONX_APIKEY='your_ibm_api_key'
-  ```
-  To make this environment variable persistent across terminal sessions, add the above line to your `~/.bashrc`, `~/.bash_profile`, or `~/.zshrc` file.
-
-- **Windows:** For Command Prompt, use:
-  ```cmd
-  set WATSONX_APIKEY=your_ibm_api_key
-  ```
-
-### Loading the model
-
-You might need to adjust model parameters for different models or tasks. For more details on the parameters, refer to IBM's [documentation](https://ibm.github.io/watsonx-ai-python-sdk/fm_model.html#metanames.GenTextParamsMetaNames).
-
-```python
-parameters = {
-    "decoding_method": "sample",
-    "max_new_tokens": 100,
-    "min_new_tokens": 1,
-    "temperature": 0.5,
-    "top_k": 50,
-    "top_p": 1,
-}
-```
-
-Initialize the WatsonxLLM class with the previously set parameters.
-
-```python
-from langchain_ibm import WatsonxLLM
-
-watsonx_llm = WatsonxLLM(
-    model_id="PASTE THE CHOSEN MODEL_ID HERE",
-    url="PASTE YOUR URL HERE",
-    project_id="PASTE YOUR PROJECT_ID HERE",
-    params=parameters,
-)
-```
-
-**Note:**
-- You must provide a `project_id` or `space_id`. For more information, refer to IBM's [documentation](https://www.ibm.com/docs/en/watsonx-as-a-service?topic=projects).
-- Depending on the region of your provisioned service instance, use one of the URLs described [here](https://ibm.github.io/watsonx-ai-python-sdk/setup_cloud.html#authentication).
-- You need to specify the model you want to use for inferencing through `model_id`. You can find the list of available models [here](https://ibm.github.io/watsonx-ai-python-sdk/fm_model.html#ibm_watsonx_ai.foundation_models.utils.enums.ModelTypes).
-
-
-Alternatively, you can use Cloud Pak for Data credentials. For more details, refer to IBM's [documentation](https://ibm.github.io/watsonx-ai-python-sdk/setup_cpd.html).
-
-```python
-watsonx_llm = WatsonxLLM(
-    model_id="ibm/granite-13b-instruct-v2",
-    url="PASTE YOUR URL HERE",
-    username="PASTE YOUR USERNAME HERE",
-    password="PASTE YOUR PASSWORD HERE",
-    instance_id="openshift",
-    version="4.8",
-    project_id="PASTE YOUR PROJECT_ID HERE",
-    params=parameters,
-)
-```
-
-### Create a Chain
-
-Create a `PromptTemplate` object which will be responsible for creating a random question.
-
-```python
-from langchain.prompts import PromptTemplate
-
-template = "Generate a random question about {topic}: Question: "
-prompt = PromptTemplate.from_template(template)
-```
-
-Provide a topic and run the LLMChain.
- -```python -from langchain.chains import LLMChain - -llm_chain = LLMChain(prompt=prompt, llm=watsonx_llm) -response = llm_chain.invoke("dog") -print(response) -``` - -### Calling the Model Directly -To obtain completions, you can call the model directly using a string prompt. - -```python -# Calling a single prompt - -response = watsonx_llm.invoke("Who is man's best friend?") -print(response) -``` - -```python -# Calling multiple prompts - -response = watsonx_llm.generate( - [ - "The fastest dog in the world?", - "Describe your chosen dog breed", - ] -) -print(response) -``` - -### Streaming the Model output - -You can stream the model output. - -```python -for chunk in watsonx_llm.stream( - "Describe your favorite breed of dog and why it is your favorite." -): - print(chunk, end="") -``` +https://github.com/langchain-ai/langchain-ibm/tree/main/libs/ibm \ No newline at end of file diff --git a/libs/partners/ibm/langchain_ibm/__init__.py b/libs/partners/ibm/langchain_ibm/__init__.py deleted file mode 100644 index dbda2223fda..00000000000 --- a/libs/partners/ibm/langchain_ibm/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from langchain_ibm.chat_models import ChatWatsonx -from langchain_ibm.embeddings import WatsonxEmbeddings -from langchain_ibm.llms import WatsonxLLM - -__all__ = ["WatsonxLLM", "WatsonxEmbeddings", "ChatWatsonx"] diff --git a/libs/partners/ibm/langchain_ibm/chat_models.py b/libs/partners/ibm/langchain_ibm/chat_models.py deleted file mode 100644 index 96a51b42266..00000000000 --- a/libs/partners/ibm/langchain_ibm/chat_models.py +++ /dev/null @@ -1,915 +0,0 @@ -import json -import logging -import os -import re -from operator import itemgetter -from typing import ( - Any, - Callable, - Dict, - Iterator, - List, - Literal, - Mapping, - Optional, - Sequence, - Tuple, - Type, - TypedDict, - Union, - cast, -) - -from ibm_watsonx_ai import Credentials # type: ignore -from ibm_watsonx_ai.foundation_models import ModelInference # type: ignore -from langchain_core.callbacks import CallbackManagerForLLMRun -from langchain_core.language_models import LanguageModelInput -from langchain_core.language_models.chat_models import ( - BaseChatModel, - LangSmithParams, - generate_from_stream, -) -from langchain_core.messages import ( - AIMessage, - AIMessageChunk, - BaseMessage, - BaseMessageChunk, - ChatMessage, - ChatMessageChunk, - FunctionMessage, - FunctionMessageChunk, - HumanMessage, - HumanMessageChunk, - SystemMessage, - SystemMessageChunk, - ToolCallChunk, - ToolMessage, - ToolMessageChunk, - convert_to_messages, -) -from langchain_core.messages.tool import tool_call_chunk as create_tool_call_chunk -from langchain_core.output_parsers import JsonOutputParser, PydanticOutputParser -from langchain_core.output_parsers.base import OutputParserLike -from langchain_core.output_parsers.openai_tools import ( - JsonOutputKeyToolsParser, - PydanticToolsParser, - make_invalid_tool_call, - parse_tool_call, -) -from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult -from langchain_core.prompt_values import ChatPromptValue -from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator -from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough -from langchain_core.tools import BaseTool -from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env -from langchain_core.utils.function_calling import ( - convert_to_openai_function, - convert_to_openai_tool, -) - -logger = logging.getLogger(__name__) - - -def 
_convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage: - """Convert a dictionary to a LangChain message. - - Args: - _dict: The dictionary. - - Returns: - The LangChain message. - """ - role = _dict.get("role") - if role == "user": - return HumanMessage(content=_dict.get("generated_text", "")) - else: - additional_kwargs: Dict = {} - tool_calls = [] - invalid_tool_calls = [] - try: - content = "" - - raw_tool_calls = _dict.get("generated_text") - if raw_tool_calls: - json_parts = re.split(r"\n\n(?:\n\n)?", raw_tool_calls) - parsed_raw_tool_calls = [ - json.loads(part) for part in json_parts if part.strip() - ] - additional_kwargs["tool_calls"] = parsed_raw_tool_calls - additional_kwargs["function_call"] = dict(parsed_raw_tool_calls) - - for obj in parsed_raw_tool_calls: - b = json.dumps(obj["function"]["arguments"]) - obj["function"]["arguments"] = b - - for raw_tool_call in parsed_raw_tool_calls: - try: - raw_tool_call["id"] = "None" - tool_calls.append( - parse_tool_call(raw_tool_call, return_id=True) - ) - except Exception as e: - invalid_tool_calls.append( - dict(make_invalid_tool_call(raw_tool_call, str(e))) - ) - except: # noqa: E722 - content = _dict.get("generated_text", "") or "" - - return AIMessage( - content=content, - additional_kwargs=additional_kwargs, - tool_calls=tool_calls, - invalid_tool_calls=invalid_tool_calls, - ) - - -def _convert_message_to_dict(message: BaseMessage) -> dict: - """Convert a LangChain message to a dictionary. - - Args: - message: The LangChain message. - - Returns: - The dictionary. - """ - message_dict: Dict[str, Any] - if isinstance(message, ChatMessage): - message_dict = {"role": message.role, "content": message.content} - elif isinstance(message, HumanMessage): - message_dict = {"role": "user", "content": message.content} - elif isinstance(message, AIMessage): - message_dict = {"role": "assistant", "content": message.content} - if "function_call" in message.additional_kwargs: - message_dict["function_call"] = message.additional_kwargs["function_call"] - # If function call only, content is None not empty string - if message_dict["content"] == "": - message_dict["content"] = None - if "tool_calls" in message.additional_kwargs: - message_dict["tool_calls"] = message.additional_kwargs["tool_calls"] - # If tool calls only, content is None not empty string - if message_dict["content"] == "": - message_dict["content"] = None - elif isinstance(message, SystemMessage): - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, FunctionMessage): - message_dict = { - "role": "function", - "content": message.content, - "name": message.name, - } - elif isinstance(message, ToolMessage): - message_dict = { - "role": "tool", - "content": message.content, - "tool_call_id": "None", - } - else: - raise TypeError(f"Got unknown type {message}") - if "name" in message.additional_kwargs: - message_dict["name"] = message.additional_kwargs["name"] - return message_dict - - -def _convert_delta_to_message_chunk( - _dict: Mapping[str, Any], default_class: Type[BaseMessageChunk] -) -> BaseMessageChunk: - role = cast(str, _dict.get("role")) - content = cast(str, _dict.get("content") or "") - additional_kwargs: Dict = {} - tool_call_chunks: List[ToolCallChunk] = [] - if _dict.get("function_call"): - function_call = dict(_dict["function_call"]) - if "name" in function_call and function_call["name"] is None: - function_call["name"] = "" - additional_kwargs["function_call"] = function_call - if raw_tool_calls := _dict.get("tool_calls"): 
- additional_kwargs["tool_calls"] = raw_tool_calls - for rtc in raw_tool_calls: - try: - tool_call_chunks.append( - create_tool_call_chunk( - name=rtc["function"].get("name"), - args=rtc["function"].get("arguments"), - id=rtc.get("id"), - index=rtc.get("index"), - ) - ) - except KeyError: - pass - if role == "user" or default_class == HumanMessageChunk: - return HumanMessageChunk(content=content) - elif role == "assistant" or default_class == AIMessageChunk: - return AIMessageChunk( - content=content, - additional_kwargs=additional_kwargs, - tool_call_chunks=tool_call_chunks, # type: ignore[arg-type] - ) - elif role == "system" or default_class == SystemMessageChunk: - return SystemMessageChunk(content=content) - elif role == "function" or default_class == FunctionMessageChunk: - return FunctionMessageChunk(content=content, name=_dict["name"]) - elif role == "tool" or default_class == ToolMessageChunk: - return ToolMessageChunk(content=content, tool_call_id=_dict["tool_call_id"]) - elif role or default_class == ChatMessageChunk: - return ChatMessageChunk(content=content, role=role) - else: - return default_class(content=content) # type: ignore - - -class _FunctionCall(TypedDict): - name: str - - -class ChatWatsonx(BaseChatModel): - """ - IBM watsonx.ai large language chat models. - - To use, you should have ``langchain_ibm`` python package installed, - and the environment variable ``WATSONX_APIKEY`` set with your API key, or pass - it as a named parameter to the constructor. - - - Example: - .. code-block:: python - - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - parameters = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MAX_NEW_TOKENS: 100, - GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, - GenTextParamsMetaNames.TEMPERATURE: 0.5, - GenTextParamsMetaNames.TOP_K: 50, - GenTextParamsMetaNames.TOP_P: 1, - } - - from langchain_ibm import ChatWatsonx - watsonx_llm = ChatWatsonx( - model_id="meta-llama/llama-3-70b-instruct", - url="https://us-south.ml.cloud.ibm.com", - apikey="*****", - project_id="*****", - params=parameters, - ) - """ - - model_id: str = "" - """Type of model to use.""" - - deployment_id: str = "" - """Type of deployed model to use.""" - - project_id: str = "" - """ID of the Watson Studio project.""" - - space_id: str = "" - """ID of the Watson Studio space.""" - - url: Optional[SecretStr] = None - """Url to Watson Machine Learning or CPD instance""" - - apikey: Optional[SecretStr] = None - """Apikey to Watson Machine Learning or CPD instance""" - - token: Optional[SecretStr] = None - """Token to CPD instance""" - - password: Optional[SecretStr] = None - """Password to CPD instance""" - - username: Optional[SecretStr] = None - """Username to CPD instance""" - - instance_id: Optional[SecretStr] = None - """Instance_id of CPD instance""" - - version: Optional[SecretStr] = None - """Version of CPD instance""" - - params: Optional[dict] = None - """Chat Model parameters to use during generate requests.""" - - verify: Union[str, bool] = "" - """User can pass as verify one of following: - the path to a CA_BUNDLE file - the path of directory with certificates of trusted CAs - True - default path to truststore will be taken - False - no verification will be made""" - - streaming: bool = False - """ Whether to stream the results or not. 
""" - - watsonx_model: ModelInference = Field(default=None, exclude=True) #: :meta private: - - class Config: - """Configuration for this pydantic object.""" - - allow_population_by_field_name = True - - @classmethod - def is_lc_serializable(cls) -> bool: - return False - - @property - def _llm_type(self) -> str: - """Return type of chat model.""" - return "watsonx-chat" - - def _get_ls_params( - self, stop: Optional[List[str]] = None, **kwargs: Any - ) -> LangSmithParams: - """Get standard params for tracing.""" - params = super()._get_ls_params(stop=stop, **kwargs) - params["ls_provider"] = "together" - params["ls_model_name"] = self.model_id - return params - - @property - def lc_secrets(self) -> Dict[str, str]: - """A map of constructor argument names to secret ids. - - For example: - { - "url": "WATSONX_URL", - "apikey": "WATSONX_APIKEY", - "token": "WATSONX_TOKEN", - "password": "WATSONX_PASSWORD", - "username": "WATSONX_USERNAME", - "instance_id": "WATSONX_INSTANCE_ID", - } - """ - return { - "url": "WATSONX_URL", - "apikey": "WATSONX_APIKEY", - "token": "WATSONX_TOKEN", - "password": "WATSONX_PASSWORD", - "username": "WATSONX_USERNAME", - "instance_id": "WATSONX_INSTANCE_ID", - } - - @root_validator() - def validate_environment(cls, values: Dict) -> Dict: - """Validate that credentials and python package exists in environment.""" - values["url"] = convert_to_secret_str( - get_from_dict_or_env(values, "url", "WATSONX_URL") - ) - if "cloud.ibm.com" in values.get("url", "").get_secret_value(): - values["apikey"] = convert_to_secret_str( - get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY") - ) - else: - if ( - not values["token"] - and "WATSONX_TOKEN" not in os.environ - and not values["password"] - and "WATSONX_PASSWORD" not in os.environ - and not values["apikey"] - and "WATSONX_APIKEY" not in os.environ - ): - raise ValueError( - "Did not find 'token', 'password' or 'apikey'," - " please add an environment variable" - " `WATSONX_TOKEN`, 'WATSONX_PASSWORD' or 'WATSONX_APIKEY' " - "which contains it," - " or pass 'token', 'password' or 'apikey'" - " as a named parameter." 
- ) - elif values["token"] or "WATSONX_TOKEN" in os.environ: - values["token"] = convert_to_secret_str( - get_from_dict_or_env(values, "token", "WATSONX_TOKEN") - ) - elif values["password"] or "WATSONX_PASSWORD" in os.environ: - values["password"] = convert_to_secret_str( - get_from_dict_or_env(values, "password", "WATSONX_PASSWORD") - ) - values["username"] = convert_to_secret_str( - get_from_dict_or_env(values, "username", "WATSONX_USERNAME") - ) - elif values["apikey"] or "WATSONX_APIKEY" in os.environ: - values["apikey"] = convert_to_secret_str( - get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY") - ) - values["username"] = convert_to_secret_str( - get_from_dict_or_env(values, "username", "WATSONX_USERNAME") - ) - if not values["instance_id"] or "WATSONX_INSTANCE_ID" not in os.environ: - values["instance_id"] = convert_to_secret_str( - get_from_dict_or_env(values, "instance_id", "WATSONX_INSTANCE_ID") - ) - credentials = Credentials( - url=values["url"].get_secret_value() if values["url"] else None, - api_key=values["apikey"].get_secret_value() if values["apikey"] else None, - token=values["token"].get_secret_value() if values["token"] else None, - password=values["password"].get_secret_value() - if values["password"] - else None, - username=values["username"].get_secret_value() - if values["username"] - else None, - instance_id=values["instance_id"].get_secret_value() - if values["instance_id"] - else None, - version=values["version"].get_secret_value() if values["version"] else None, - verify=values["verify"], - ) - - watsonx_chat = ModelInference( - model_id=values["model_id"], - deployment_id=values["deployment_id"], - credentials=credentials, - params=values["params"], - project_id=values["project_id"], - space_id=values["space_id"], - ) - values["watsonx_model"] = watsonx_chat - - return values - - def _generate( - self, - messages: List[BaseMessage], - stop: Optional[List[str]] = None, - run_manager: Optional[CallbackManagerForLLMRun] = None, - stream: Optional[bool] = None, - **kwargs: Any, - ) -> ChatResult: - should_stream = stream if stream is not None else self.streaming - if should_stream: - stream_iter = self._stream( - messages, stop=stop, run_manager=run_manager, **kwargs - ) - return generate_from_stream(stream_iter) - - message_dicts, params = self._create_message_dicts(messages, stop, **kwargs) - chat_prompt = self._create_chat_prompt(message_dicts) - - tools = kwargs.get("tools") - - if tools: - chat_prompt = f"""[AVAILABLE_TOOLS] -{json.dumps(tools[0], indent=2)} -[/AVAILABLE_TOOLS] -[INST]<>You are Mixtral Chat function calling, an AI language model developed by -Mistral AI. You are a cautious assistant. You carefully follow instructions. You are -helpful and harmless and you follow ethical guidelines and promote positive behavior. -<> - -To use these tools you must always respond in JSON format containing `"type"` and -`"function"` key-value pairs. Also `"function"` key-value pair always containing -`"name"` and `"arguments"` key-value pairs. - -Between subsequent JSONs should be one blank line. - -Remember, even when answering to the user, you must still use this only JSON format! 
- -{chat_prompt}[/INST]""" - - if "tools" in kwargs: - del kwargs["tools"] - if "tool_choice" in kwargs: - del kwargs["tool_choice"] - - response = self.watsonx_model.generate( - prompt=chat_prompt, **(kwargs | {"params": params}) - ) - return self._create_chat_result(response) - - def _stream( - self, - messages: List[BaseMessage], - stop: Optional[List[str]] = None, - run_manager: Optional[CallbackManagerForLLMRun] = None, - **kwargs: Any, - ) -> Iterator[ChatGenerationChunk]: - message_dicts, params = self._create_message_dicts(messages, stop, **kwargs) - chat_prompt = self._create_chat_prompt(message_dicts) - - tools = kwargs.get("tools") - - if tools: - chat_prompt = f"""[AVAILABLE_TOOLS] -{json.dumps(tools[0], indent=2)} -[/AVAILABLE_TOOLS] -[INST]<>You are Mixtral Chat function calling, an AI language model developed by -Mistral AI. You are a cautious assistant. You carefully follow instructions. You are -helpful and harmless and you follow ethical guidelines and promote positive behavior. -<> - -To use these tools you must always respond in JSON format containing `"type"` and -`"function"` key-value pairs. Also `"function"` key-value pair always containing -`"name"` and `"arguments"` key-value pairs. - -Between subsequent JSONs should be one blank line. - -Remember, even when answering to the user, you must still use this only JSON format! - -{chat_prompt}[/INST]""" - - if "tools" in kwargs: - del kwargs["tools"] - if "tool_choice" in kwargs: - del kwargs["tool_choice"] - - for chunk in self.watsonx_model.generate_text_stream( - prompt=chat_prompt, raw_response=True, **(kwargs | {"params": params}) - ): - if not isinstance(chunk, dict): - chunk = chunk.dict() - if len(chunk["results"]) == 0: - continue - choice = chunk["results"][0] - - chunk = AIMessageChunk( - content=choice["generated_text"], - ) - generation_info = {} - if finish_reason := choice.get("stop_reason"): - generation_info["finish_reason"] = finish_reason - logprobs = choice.get("logprobs") - if logprobs: - generation_info["logprobs"] = logprobs - chunk = ChatGenerationChunk( - message=chunk, generation_info=generation_info or None - ) - if run_manager: - run_manager.on_llm_new_token( - chunk.content, chunk=chunk, logprobs=logprobs - ) - - yield chunk - - def _create_chat_prompt(self, messages: List[Dict[str, Any]]) -> str: - prompt = "" - - if self.model_id in ["ibm/granite-13b-chat-v1", "ibm/granite-13b-chat-v2"]: - for message in messages: - if message["role"] == "system": - prompt += "<|system|>\n" + message["content"] + "\n\n" - elif message["role"] == "assistant": - prompt += "<|assistant|>\n" + message["content"] + "\n\n" - elif message["role"] == "function": - prompt += "<|function|>\n" + message["content"] + "\n\n" - elif message["role"] == "tool": - prompt += "<|tool|>\n" + message["content"] + "\n\n" - else: - prompt += "<|user|>:\n" + message["content"] + "\n\n" - - prompt += "<|assistant|>\n" - - elif self.model_id in [ - "meta-llama/llama-2-13b-chat", - "meta-llama/llama-2-70b-chat", - ]: - for message in messages: - if message["role"] == "system": - prompt += "[INST] <>\n" + message["content"] + "<>\n\n" - elif message["role"] == "assistant": - prompt += message["content"] + "\n[INST]\n\n" - else: - prompt += message["content"] + "\n[/INST]\n" - - else: - prompt = ChatPromptValue( - messages=convert_to_messages(messages) + [AIMessage(content="")] - ).to_string() - - return prompt - - def _create_message_dicts( - self, messages: List[BaseMessage], stop: Optional[List[str]], **kwargs: Any - ) -> 
Tuple[List[Dict[str, Any]], Dict[str, Any]]: - params = {**self.params} if self.params else {} - params = params | {**kwargs.get("params", {})} - if stop is not None: - if params and "stop_sequences" in params: - raise ValueError( - "`stop_sequences` found in both the input and default params." - ) - params = (params or {}) | {"stop_sequences": stop} - message_dicts = [_convert_message_to_dict(m) for m in messages] - return message_dicts, params - - def _create_chat_result(self, response: Union[dict]) -> ChatResult: - generations = [] - sum_of_total_generated_tokens = 0 - sum_of_total_input_tokens = 0 - - if response.get("error"): - raise ValueError(response.get("error")) - - for res in response["results"]: - message = _convert_dict_to_message(res) - generation_info = dict(finish_reason=res.get("stop_reason")) - if "logprobs" in res: - generation_info["logprobs"] = res["logprobs"] - if "generated_token_count" in res: - sum_of_total_generated_tokens += res["generated_token_count"] - if "input_token_count" in res: - sum_of_total_input_tokens += res["input_token_count"] - total_token = sum_of_total_generated_tokens + sum_of_total_input_tokens - if total_token and isinstance(message, AIMessage): - message.usage_metadata = { - "input_tokens": sum_of_total_input_tokens, - "output_tokens": sum_of_total_generated_tokens, - "total_tokens": total_token, - } - gen = ChatGeneration( - message=message, - generation_info=generation_info, - ) - generations.append(gen) - token_usage = { - "generated_token_count": sum_of_total_generated_tokens, - "input_token_count": sum_of_total_input_tokens, - } - llm_output = { - "token_usage": token_usage, - "model_name": self.model_id, - "system_fingerprint": response.get("system_fingerprint", ""), - } - return ChatResult(generations=generations, llm_output=llm_output) - - def bind_functions( - self, - functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]], - function_call: Optional[ - Union[_FunctionCall, str, Literal["auto", "none"]] - ] = None, - **kwargs: Any, - ) -> Runnable[LanguageModelInput, BaseMessage]: - """Bind functions (and other objects) to this chat model. - - Assumes model is compatible with IBM watsonx.ai function-calling API. - - Args: - functions: A list of function definitions to bind to this chat model. - Can be a dictionary, pydantic model, or callable. Pydantic - models and callables will be automatically converted to - their schema dictionary representation. - function_call: Which function to require the model to call. - Must be the name of the single provided function or - "auto" to automatically determine which function to call - (if any). - **kwargs: Any additional parameters to pass to the - :class:`~langchain.runnable.Runnable` constructor. - """ - - formatted_functions = [convert_to_openai_function(fn) for fn in functions] - if function_call is not None: - function_call = ( - {"name": function_call} - if isinstance(function_call, str) - and function_call not in ("auto", "none") - else function_call - ) - if isinstance(function_call, dict) and len(formatted_functions) != 1: - raise ValueError( - "When specifying `function_call`, you must provide exactly one " - "function." - ) - if ( - isinstance(function_call, dict) - and formatted_functions[0]["name"] != function_call["name"] - ): - raise ValueError( - f"Function call {function_call} was specified, but the only " - f"provided function was {formatted_functions[0]['name']}." 
- ) - kwargs = {**kwargs, "function_call": function_call} - return super().bind( - functions=formatted_functions, - **kwargs, - ) - - def bind_tools( - self, - tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]], - **kwargs: Any, - ) -> Runnable[LanguageModelInput, BaseMessage]: - """Bind tool-like objects to this chat model. - - Args: - tools: A list of tool definitions to bind to this chat model. - Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic - models, callables, and BaseTools will be automatically converted to - their schema dictionary representation. - **kwargs: Any additional parameters to pass to the - :class:`~langchain.runnable.Runnable` constructor. - """ - bind_tools_supported_models = ["mistralai/mixtral-8x7b-instruct-v01"] - if self.model_id not in bind_tools_supported_models: - raise Warning( - f"bind_tools() method for ChatWatsonx support only " - f"following models: {bind_tools_supported_models}" - ) - - formatted_tools = [convert_to_openai_tool(tool) for tool in tools] - - return super().bind(tools=formatted_tools, **kwargs) - - def with_structured_output( - self, - schema: Optional[Union[Dict, Type[BaseModel]]] = None, - *, - method: Literal["function_calling", "json_mode"] = "function_calling", - include_raw: bool = False, - **kwargs: Any, - ) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]: - """Model wrapper that returns outputs formatted to match the given schema. - - Args: - schema: The output schema as a dict or a Pydantic class. If a Pydantic class - then the model output will be an object of that class. If a dict then - the model output will be a dict. With a Pydantic class the returned - attributes will be validated, whereas with a dict they will not be. If - `method` is "function_calling" and `schema` is a dict, then the dict - must match the IBM watsonx.ai function-calling spec. - method: The method for steering model generation, either "function_calling" - or "json_mode". If "function_calling" then the schema will be converted - to an IBM watsonx.ai function and the returned model will make use of the - function-calling API. If "json_mode" then IBM watsonx.ai's JSON mode will be - used. Note that if using "json_mode" then you must include instructions - for formatting the output into the desired schema into the model call. - include_raw: If False then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If True - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys "raw", "parsed", and "parsing_error". - - Returns: - A Runnable that takes any ChatModel input and returns as output: - - If include_raw is True then a dict with keys: - raw: BaseMessage - parsed: Optional[_DictOrPydantic] - parsing_error: Optional[BaseException] - - If include_raw is False then just _DictOrPydantic is returned, - where _DictOrPydantic depends on the schema: - - If schema is a Pydantic class then _DictOrPydantic is the Pydantic - class. - - If schema is a dict then _DictOrPydantic is a dict. - - Example: Function-calling, Pydantic schema (method="function_calling", include_raw=False): - .. 
code-block:: python - - from langchain_ibm import ChatWatsonx - from langchain_core.pydantic_v1 import BaseModel - - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str - - llm = ChatWatsonx(...) - structured_llm = llm.with_structured_output(AnswerWithJustification) - - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") - - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - - Example: Function-calling, Pydantic schema (method="function_calling", include_raw=True): - .. code-block:: python - - from langchain_ibm import ChatWatsonx - from langchain_core.pydantic_v1 import BaseModel - - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str - - llm = ChatWatsonx(...) - structured_llm = llm.with_structured_output(AnswerWithJustification, include_raw=True) - - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> { - # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), - # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), - # 'parsing_error': None - # } - - Example: Function-calling, dict schema (method="function_calling", include_raw=False): - .. code-block:: python - - from langchain_ibm import ChatWatsonx - from langchain_core.pydantic_v1 import BaseModel - from langchain_core.utils.function_calling import convert_to_openai_tool - - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str - - dict_schema = convert_to_openai_tool(AnswerWithJustification) - llm = ChatWatsonx(...) - structured_llm = llm.with_structured_output(dict_schema) - - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' - # } - - Example: JSON mode, Pydantic schema (method="json_mode", include_raw=True): - .. code-block:: - - from langchain_ibm import ChatWatsonx - from langchain_core.pydantic_v1 import BaseModel - - class AnswerWithJustification(BaseModel): - answer: str - justification: str - - llm = ChatWatsonx(...) - structured_llm = llm.with_structured_output( - AnswerWithJustification, - method="json_mode", - include_raw=True - ) - - structured_llm.invoke( - "Answer the following question. " - "Make sure to return a JSON blob with keys 'answer' and 'justification'.\n\n" - "What's heavier a pound of bricks or a pound of feathers?" 
- ) - # -> { - # 'raw': AIMessage(content='{\n "answer": "They are both the same weight.",\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \n}'), - # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), - # 'parsing_error': None - # } - - Example: JSON mode, no schema (schema=None, method="json_mode", include_raw=True): - .. code-block:: - - from langchain_ibm import ChatWatsonx - - structured_llm = llm.with_structured_output(method="json_mode", include_raw=True) - - structured_llm.invoke( - "Answer the following question. " - "Make sure to return a JSON blob with keys 'answer' and 'justification'.\n\n" - "What's heavier a pound of bricks or a pound of feathers?" - ) - # -> { - # 'raw': AIMessage(content='{\n "answer": "They are both the same weight.",\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \n}'), - # 'parsed': { - # 'answer': 'They are both the same weight.', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.' - # }, - # 'parsing_error': None - # } - """ # noqa: E501 - if kwargs: - raise ValueError(f"Received unsupported arguments {kwargs}") - is_pydantic_schema = _is_pydantic_class(schema) - if method == "function_calling": - if schema is None: - raise ValueError( - "schema must be specified when method is 'function_calling'. " - "Received None." - ) - llm = self.bind_tools([schema], tool_choice=True) - if is_pydantic_schema: - output_parser: OutputParserLike = PydanticToolsParser( - tools=[schema], # type: ignore[list-item] - first_tool_only=True, # type: ignore[list-item] - ) - else: - key_name = convert_to_openai_tool(schema)["function"]["name"] - output_parser = JsonOutputKeyToolsParser( - key_name=key_name, first_tool_only=True - ) - elif method == "json_mode": - llm = self.bind(response_format={"type": "json_object"}) - output_parser = ( - PydanticOutputParser(pydantic_object=schema) # type: ignore[type-var, arg-type] - if is_pydantic_schema - else JsonOutputParser() - ) - else: - raise ValueError( - f"Unrecognized method argument. Expected one of 'function_calling' or " - f"'json_format'. 
Received: '{method}'" - ) - - if include_raw: - parser_assign = RunnablePassthrough.assign( - parsed=itemgetter("raw") | output_parser, parsing_error=lambda _: None - ) - parser_none = RunnablePassthrough.assign(parsed=lambda _: None) - parser_with_fallback = parser_assign.with_fallbacks( - [parser_none], exception_key="parsing_error" - ) - return RunnableMap(raw=llm) | parser_with_fallback - else: - return llm | output_parser - - -def _is_pydantic_class(obj: Any) -> bool: - return isinstance(obj, type) and issubclass(obj, BaseModel) diff --git a/libs/partners/ibm/langchain_ibm/embeddings.py b/libs/partners/ibm/langchain_ibm/embeddings.py deleted file mode 100644 index 63549f3e242..00000000000 --- a/libs/partners/ibm/langchain_ibm/embeddings.py +++ /dev/null @@ -1,173 +0,0 @@ -import os -from typing import Dict, List, Optional, Union - -from ibm_watsonx_ai import APIClient, Credentials # type: ignore -from ibm_watsonx_ai.foundation_models.embeddings import Embeddings # type: ignore -from langchain_core.embeddings import Embeddings as LangChainEmbeddings -from langchain_core.pydantic_v1 import ( - BaseModel, - Extra, - Field, - SecretStr, - root_validator, -) -from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env - - -class WatsonxEmbeddings(BaseModel, LangChainEmbeddings): - """IBM WatsonX.ai embedding models.""" - - model_id: str = "" - """Type of model to use.""" - - project_id: str = "" - """ID of the Watson Studio project.""" - - space_id: str = "" - """ID of the Watson Studio space.""" - - url: Optional[SecretStr] = None - """Url to Watson Machine Learning or CPD instance""" - - apikey: Optional[SecretStr] = None - """Apikey to Watson Machine Learning or CPD instance""" - - token: Optional[SecretStr] = None - """Token to CPD instance""" - - password: Optional[SecretStr] = None - """Password to CPD instance""" - - username: Optional[SecretStr] = None - """Username to CPD instance""" - - instance_id: Optional[SecretStr] = None - """Instance_id of CPD instance""" - - version: Optional[SecretStr] = None - """Version of CPD instance""" - - params: Optional[dict] = None - """Model parameters to use during generate requests.""" - - verify: Union[str, bool, None] = None - """User can pass as verify one of following: - the path to a CA_BUNDLE file - the path of directory with certificates of trusted CAs - True - default path to truststore will be taken - False - no verification will be made""" - - watsonx_embed: Embeddings = Field(default=None) #: :meta private: - - watsonx_client: APIClient = Field(default=None) #: :meta private: - - class Config: - """Configuration for this pydantic object.""" - - extra = Extra.forbid - arbitrary_types_allowed = True - - @root_validator() - def validate_environment(cls, values: Dict) -> Dict: - """Validate that credentials and python package exists in environment.""" - if isinstance(values.get("watsonx_client"), APIClient): - watsonx_embed = Embeddings( - model_id=values["model_id"], - params=values["params"], - api_client=values["watsonx_client"], - project_id=values["project_id"], - space_id=values["space_id"], - verify=values["verify"], - ) - values["watsonx_embed"] = watsonx_embed - - else: - values["url"] = convert_to_secret_str( - get_from_dict_or_env(values, "url", "WATSONX_URL") - ) - if "cloud.ibm.com" in values.get("url", "").get_secret_value(): - values["apikey"] = convert_to_secret_str( - get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY") - ) - else: - if ( - not values["token"] - and "WATSONX_TOKEN" not in os.environ 
- and not values["password"] - and "WATSONX_PASSWORD" not in os.environ - and not values["apikey"] - and "WATSONX_APIKEY" not in os.environ - ): - raise ValueError( - "Did not find 'token', 'password' or 'apikey'," - " please add an environment variable" - " `WATSONX_TOKEN`, 'WATSONX_PASSWORD' or 'WATSONX_APIKEY' " - "which contains it," - " or pass 'token', 'password' or 'apikey'" - " as a named parameter." - ) - elif values["token"] or "WATSONX_TOKEN" in os.environ: - values["token"] = convert_to_secret_str( - get_from_dict_or_env(values, "token", "WATSONX_TOKEN") - ) - elif values["password"] or "WATSONX_PASSWORD" in os.environ: - values["password"] = convert_to_secret_str( - get_from_dict_or_env(values, "password", "WATSONX_PASSWORD") - ) - values["username"] = convert_to_secret_str( - get_from_dict_or_env(values, "username", "WATSONX_USERNAME") - ) - elif values["apikey"] or "WATSONX_APIKEY" in os.environ: - values["apikey"] = convert_to_secret_str( - get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY") - ) - values["username"] = convert_to_secret_str( - get_from_dict_or_env(values, "username", "WATSONX_USERNAME") - ) - if not values["instance_id"] or "WATSONX_INSTANCE_ID" not in os.environ: - values["instance_id"] = convert_to_secret_str( - get_from_dict_or_env( - values, "instance_id", "WATSONX_INSTANCE_ID" - ) - ) - - credentials = Credentials( - url=values["url"].get_secret_value() if values["url"] else None, - api_key=values["apikey"].get_secret_value() - if values["apikey"] - else None, - token=values["token"].get_secret_value() if values["token"] else None, - password=values["password"].get_secret_value() - if values["password"] - else None, - username=values["username"].get_secret_value() - if values["username"] - else None, - instance_id=values["instance_id"].get_secret_value() - if values["instance_id"] - else None, - version=values["version"].get_secret_value() - if values["version"] - else None, - verify=values["verify"], - ) - - watsonx_embed = Embeddings( - model_id=values["model_id"], - params=values["params"], - credentials=credentials, - project_id=values["project_id"], - space_id=values["space_id"], - ) - - values["watsonx_embed"] = watsonx_embed - - return values - - def embed_documents(self, texts: List[str]) -> List[List[float]]: - """Embed search docs.""" - return self.watsonx_embed.embed_documents(texts=texts) - - def embed_query(self, text: str) -> List[float]: - """Embed query text.""" - return self.embed_documents([text])[0] diff --git a/libs/partners/ibm/langchain_ibm/llms.py b/libs/partners/ibm/langchain_ibm/llms.py deleted file mode 100644 index 5ed987465c9..00000000000 --- a/libs/partners/ibm/langchain_ibm/llms.py +++ /dev/null @@ -1,428 +0,0 @@ -import logging -import os -from typing import Any, Dict, Iterator, List, Mapping, Optional, Union - -from ibm_watsonx_ai import Credentials # type: ignore -from ibm_watsonx_ai.foundation_models import Model, ModelInference # type: ignore -from langchain_core.callbacks import CallbackManagerForLLMRun -from langchain_core.language_models.llms import BaseLLM -from langchain_core.outputs import Generation, GenerationChunk, LLMResult -from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator -from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env - -logger = logging.getLogger(__name__) - - -class WatsonxLLM(BaseLLM): - """ - IBM WatsonX.ai large language models. 
- - To use, you should have ``langchain_ibm`` python package installed, - and the environment variable ``WATSONX_APIKEY`` set with your API key, or pass - it as a named parameter to the constructor. - - - Example: - .. code-block:: python - - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - parameters = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MAX_NEW_TOKENS: 100, - GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, - GenTextParamsMetaNames.TEMPERATURE: 0.5, - GenTextParamsMetaNames.TOP_K: 50, - GenTextParamsMetaNames.TOP_P: 1, - } - - from langchain_ibm import WatsonxLLM - watsonx_llm = WatsonxLLM( - model_id="google/flan-ul2", - url="https://us-south.ml.cloud.ibm.com", - apikey="*****", - project_id="*****", - params=parameters, - ) - """ - - model_id: str = "" - """Type of model to use.""" - - deployment_id: str = "" - """Type of deployed model to use.""" - - project_id: str = "" - """ID of the Watson Studio project.""" - - space_id: str = "" - """ID of the Watson Studio space.""" - - url: Optional[SecretStr] = None - """Url to Watson Machine Learning or CPD instance""" - - apikey: Optional[SecretStr] = None - """Apikey to Watson Machine Learning or CPD instance""" - - token: Optional[SecretStr] = None - """Token to CPD instance""" - - password: Optional[SecretStr] = None - """Password to CPD instance""" - - username: Optional[SecretStr] = None - """Username to CPD instance""" - - instance_id: Optional[SecretStr] = None - """Instance_id of CPD instance""" - - version: Optional[SecretStr] = None - """Version of CPD instance""" - - params: Optional[dict] = None - """Model parameters to use during generate requests.""" - - verify: Union[str, bool, None] = None - """User can pass as verify one of following: - the path to a CA_BUNDLE file - the path of directory with certificates of trusted CAs - True - default path to truststore will be taken - False - no verification will be made""" - - streaming: bool = False - """ Whether to stream the results or not. """ - - watsonx_model: ModelInference = Field(default=None, exclude=True) #: :meta private: - - class Config: - """Configuration for this pydantic object.""" - - extra = Extra.forbid - - @classmethod - def is_lc_serializable(cls) -> bool: - return False - - @property - def lc_secrets(self) -> Dict[str, str]: - """A map of constructor argument names to secret ids. 
-
-        For example:
-            {
-                "url": "WATSONX_URL",
-                "apikey": "WATSONX_APIKEY",
-                "token": "WATSONX_TOKEN",
-                "password": "WATSONX_PASSWORD",
-                "username": "WATSONX_USERNAME",
-                "instance_id": "WATSONX_INSTANCE_ID",
-            }
-        """
-        return {
-            "url": "WATSONX_URL",
-            "apikey": "WATSONX_APIKEY",
-            "token": "WATSONX_TOKEN",
-            "password": "WATSONX_PASSWORD",
-            "username": "WATSONX_USERNAME",
-            "instance_id": "WATSONX_INSTANCE_ID",
-        }
-
-    @root_validator()
-    def validate_environment(cls, values: Dict) -> Dict:
-        """Validate that credentials and python package exists in environment."""
-        if isinstance(values.get("watsonx_model"), (ModelInference, Model)):
-            values["model_id"] = getattr(values["watsonx_model"], "model_id")
-            values["deployment_id"] = getattr(
-                values["watsonx_model"], "deployment_id", ""
-            )
-            values["project_id"] = getattr(
-                getattr(values["watsonx_model"], "_client"),
-                "default_project_id",
-            )
-            values["space_id"] = getattr(
-                getattr(values["watsonx_model"], "_client"), "default_space_id"
-            )
-            values["params"] = getattr(values["watsonx_model"], "params")
-        else:
-            values["url"] = convert_to_secret_str(
-                get_from_dict_or_env(values, "url", "WATSONX_URL")
-            )
-            if "cloud.ibm.com" in values.get("url", "").get_secret_value():
-                values["apikey"] = convert_to_secret_str(
-                    get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY")
-                )
-            else:
-                if (
-                    not values["token"]
-                    and "WATSONX_TOKEN" not in os.environ
-                    and not values["password"]
-                    and "WATSONX_PASSWORD" not in os.environ
-                    and not values["apikey"]
-                    and "WATSONX_APIKEY" not in os.environ
-                ):
-                    raise ValueError(
-                        "Did not find 'token', 'password' or 'apikey',"
-                        " please add an environment variable"
-                        " `WATSONX_TOKEN`, 'WATSONX_PASSWORD' or 'WATSONX_APIKEY' "
-                        "which contains it,"
-                        " or pass 'token', 'password' or 'apikey'"
-                        " as a named parameter."
-                    )
-                elif values["token"] or "WATSONX_TOKEN" in os.environ:
-                    values["token"] = convert_to_secret_str(
-                        get_from_dict_or_env(values, "token", "WATSONX_TOKEN")
-                    )
-                elif values["password"] or "WATSONX_PASSWORD" in os.environ:
-                    values["password"] = convert_to_secret_str(
-                        get_from_dict_or_env(values, "password", "WATSONX_PASSWORD")
-                    )
-                    values["username"] = convert_to_secret_str(
-                        get_from_dict_or_env(values, "username", "WATSONX_USERNAME")
-                    )
-                elif values["apikey"] or "WATSONX_APIKEY" in os.environ:
-                    values["apikey"] = convert_to_secret_str(
-                        get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY")
-                    )
-                    values["username"] = convert_to_secret_str(
-                        get_from_dict_or_env(values, "username", "WATSONX_USERNAME")
-                    )
-                if not values["instance_id"] or "WATSONX_INSTANCE_ID" not in os.environ:
-                    values["instance_id"] = convert_to_secret_str(
-                        get_from_dict_or_env(
-                            values, "instance_id", "WATSONX_INSTANCE_ID"
-                        )
-                    )
-            credentials = Credentials(
-                url=values["url"].get_secret_value() if values["url"] else None,
-                api_key=values["apikey"].get_secret_value()
-                if values["apikey"]
-                else None,
-                token=values["token"].get_secret_value() if values["token"] else None,
-                password=values["password"].get_secret_value()
-                if values["password"]
-                else None,
-                username=values["username"].get_secret_value()
-                if values["username"]
-                else None,
-                instance_id=values["instance_id"].get_secret_value()
-                if values["instance_id"]
-                else None,
-                version=values["version"].get_secret_value()
-                if values["version"]
-                else None,
-                verify=values["verify"],
-            )
-
-            watsonx_model = ModelInference(
-                model_id=values["model_id"],
-                deployment_id=values["deployment_id"],
-                credentials=credentials,
-                params=values["params"],
-                project_id=values["project_id"],
-                space_id=values["space_id"],
-            )
-            values["watsonx_model"] = watsonx_model
-
-        return values
-
-    @property
-    def _identifying_params(self) -> Mapping[str, Any]:
-        """Get the identifying parameters."""
-        return {
-            "model_id": self.model_id,
-            "deployment_id": self.deployment_id,
-            "params": self.params,
-            "project_id": self.project_id,
-            "space_id": self.space_id,
-        }
-
-    @property
-    def _llm_type(self) -> str:
-        """Return type of llm."""
-        return "IBM watsonx.ai"
-
-    @staticmethod
-    def _extract_token_usage(
-        response: Optional[List[Dict[str, Any]]] = None,
-    ) -> Dict[str, Any]:
-        if response is None:
-            return {"generated_token_count": 0, "input_token_count": 0}
-
-        input_token_count = 0
-        generated_token_count = 0
-
-        def get_count_value(key: str, result: Dict[str, Any]) -> int:
-            return result.get(key, 0) or 0
-
-        for res in response:
-            results = res.get("results")
-            if results:
-                input_token_count += get_count_value("input_token_count", results[0])
-                generated_token_count += get_count_value(
-                    "generated_token_count", results[0]
-                )
-
-        return {
-            "generated_token_count": generated_token_count,
-            "input_token_count": input_token_count,
-        }
-
-    def _get_chat_params(
-        self, stop: Optional[List[str]] = None, **kwargs: Any
-    ) -> Optional[Dict[str, Any]]:
-        params = {**self.params} if self.params else {}
-        params = params | {**kwargs.get("params", {})}
-        if stop is not None:
-            if params and "stop_sequences" in params:
-                raise ValueError(
-                    "`stop_sequences` found in both the input and default params."
-                )
-            params = (params or {}) | {"stop_sequences": stop}
-        return params
-
-    def _create_llm_result(self, response: List[dict]) -> LLMResult:
-        """Create the LLMResult from the choices and prompts."""
-        generations = []
-        for res in response:
-            results = res.get("results")
-            if results:
-                finish_reason = results[0].get("stop_reason")
-                gen = Generation(
-                    text=results[0].get("generated_text"),
-                    generation_info={"finish_reason": finish_reason},
-                )
-                generations.append([gen])
-        final_token_usage = self._extract_token_usage(response)
-        llm_output = {
-            "token_usage": final_token_usage,
-            "model_id": self.model_id,
-            "deployment_id": self.deployment_id,
-        }
-        return LLMResult(generations=generations, llm_output=llm_output)
-
-    def _stream_response_to_generation_chunk(
-        self,
-        stream_response: Dict[str, Any],
-    ) -> GenerationChunk:
-        """Convert a stream response to a generation chunk."""
-        if not stream_response["results"]:
-            return GenerationChunk(text="")
-        return GenerationChunk(
-            text=stream_response["results"][0]["generated_text"],
-            generation_info=dict(
-                finish_reason=stream_response["results"][0].get("stop_reason", None),
-                llm_output={
-                    "model_id": self.model_id,
-                    "deployment_id": self.deployment_id,
-                },
-            ),
-        )
-
-    def _call(
-        self,
-        prompt: str,
-        stop: Optional[List[str]] = None,
-        run_manager: Optional[CallbackManagerForLLMRun] = None,
-        **kwargs: Any,
-    ) -> str:
-        """Call the IBM watsonx.ai inference endpoint.
-        Args:
-            prompt: The prompt to pass into the model.
-            stop: Optional list of stop words to use when generating.
-            run_manager: Optional callback manager.
-        Returns:
-            The string generated by the model.
-        Example:
-            .. code-block:: python
-
-                response = watsonx_llm.invoke("What is a molecule")
-        """
-        result = self._generate(
-            prompts=[prompt], stop=stop, run_manager=run_manager, **kwargs
-        )
-        return result.generations[0][0].text
-
-    def _generate(
-        self,
-        prompts: List[str],
-        stop: Optional[List[str]] = None,
-        run_manager: Optional[CallbackManagerForLLMRun] = None,
-        stream: Optional[bool] = None,
-        **kwargs: Any,
-    ) -> LLMResult:
-        """Call the IBM watsonx.ai inference endpoint which then generate the response.
-        Args:
-            prompts: List of strings (prompts) to pass into the model.
-            stop: Optional list of stop words to use when generating.
-            run_manager: Optional callback manager.
-        Returns:
-            The full LLMResult output.
-        Example:
-            .. code-block:: python
-
-                response = watsonx_llm.generate(["What is a molecule"])
-        """
-        params = self._get_chat_params(stop=stop, **kwargs)
-        should_stream = stream if stream is not None else self.streaming
-        if should_stream:
-            if len(prompts) > 1:
-                raise ValueError(
-                    f"WatsonxLLM currently only supports single prompt, got {prompts}"
-                )
-            generation = GenerationChunk(text="")
-            stream_iter = self._stream(
-                prompts[0], stop=stop, run_manager=run_manager, **kwargs
-            )
-            for chunk in stream_iter:
-                if generation is None:
-                    generation = chunk
-                else:
-                    generation += chunk
-            assert generation is not None
-            if isinstance(generation.generation_info, dict):
-                llm_output = generation.generation_info.pop("llm_output")
-                return LLMResult(generations=[[generation]], llm_output=llm_output)
-            return LLMResult(generations=[[generation]])
-        else:
-            response = self.watsonx_model.generate(
-                prompt=prompts, **(kwargs | {"params": params})
-            )
-            return self._create_llm_result(response)
-
-    def _stream(
-        self,
-        prompt: str,
-        stop: Optional[List[str]] = None,
-        run_manager: Optional[CallbackManagerForLLMRun] = None,
-        **kwargs: Any,
-    ) -> Iterator[GenerationChunk]:
-        """Call the IBM watsonx.ai inference endpoint which then streams the response.
-        Args:
-            prompt: The prompt to pass into the model.
-            stop: Optional list of stop words to use when generating.
-            run_manager: Optional callback manager.
-        Returns:
-            The iterator which yields generation chunks.
-        Example:
-            .. code-block:: python
-
-                response = watsonx_llm.stream("What is a molecule")
-                for chunk in response:
-                    print(chunk, end='')
-        """
-        params = self._get_chat_params(stop=stop, **kwargs)
-        for stream_resp in self.watsonx_model.generate_text_stream(
-            prompt=prompt, raw_response=True, **(kwargs | {"params": params})
-        ):
-            if not isinstance(stream_resp, dict):
-                stream_resp = stream_resp.dict()
-            chunk = self._stream_response_to_generation_chunk(stream_resp)
-
-            if run_manager:
-                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
-            yield chunk
-
-    def get_num_tokens(self, text: str) -> int:
-        response = self.watsonx_model.tokenize(text, return_tokens=False)
-        return response["result"]["token_count"]
-
-    def get_token_ids(self, text: str) -> List[int]:
-        raise NotImplementedError("API does not support returning token ids.")
diff --git a/libs/partners/ibm/langchain_ibm/py.typed b/libs/partners/ibm/langchain_ibm/py.typed
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/libs/partners/ibm/poetry.lock b/libs/partners/ibm/poetry.lock
deleted file mode 100644
index 682ed4725d7..00000000000
--- a/libs/partners/ibm/poetry.lock
+++ /dev/null
@@ -1,1150 +0,0 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
-
-[[package]]
-name = "annotated-types"
-version = "0.7.0"
-description = "Reusable constraint types to use with typing.Annotated"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
-    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
-]
-
-[[package]]
-name = "certifi"
-version = "2024.7.4"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "codespell" -version = "2.3.0" -description = "Codespell" -optional = false -python-versions = ">=3.8" -files = [ - {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, - {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, -] - -[package.extras] -dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] -hard-encoding-detection = ["chardet"] -toml = ["tomli"] -types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.1" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "freezegun" -version = "1.5.1" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.7" -files = [ - {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, - {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "ibm-cos-sdk" -version = "2.13.5" -description = "IBM SDK for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ibm-cos-sdk-2.13.5.tar.gz", hash = "sha256:1aff7f9863ac9072a3db2f0053bec99478b26f3fb5fa797ce96a15bbb13cd40e"}, -] - -[package.dependencies] -ibm-cos-sdk-core = "2.13.5" -ibm-cos-sdk-s3transfer = "2.13.5" -jmespath = ">=0.10.0,<=1.0.1" - -[[package]] -name = "ibm-cos-sdk-core" -version = "2.13.5" -description = "Low-level, data-driven core of IBM SDK for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "ibm-cos-sdk-core-2.13.5.tar.gz", hash = "sha256:d3a99d8b06b3f8c00b1a9501f85538d592463e63ddf8cec32672ab5a0b107b83"}, -] - -[package.dependencies] -jmespath = ">=0.10.0,<=1.0.1" -python-dateutil = ">=2.9.0,<3.0.0" -requests = ">=2.32.3,<3.0" -urllib3 = {version = ">=1.26.18,<2.2", markers = "python_version >= \"3.10\""} - -[[package]] -name = "ibm-cos-sdk-s3transfer" -version = "2.13.5" -description = "IBM S3 Transfer Manager" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ibm-cos-sdk-s3transfer-2.13.5.tar.gz", hash = "sha256:9649b1f2201c6de96ff5a6b5a3686de3a809e6ef3b8b12c7c4f2f7ce72da7749"}, -] - -[package.dependencies] -ibm-cos-sdk-core = "2.13.5" - -[[package]] -name = "ibm-watsonx-ai" -version = "1.0.10" -description = "IBM watsonx.ai API Client" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ibm_watsonx_ai-1.0.10-py3-none-any.whl", hash = "sha256:6358838fc8e5a88f336a55063f3a8efee01311a1f6cab65acb5a7fbb7129670f"}, - {file = "ibm_watsonx_ai-1.0.10.tar.gz", hash = "sha256:e432396399efa342e5e41c6383cf2425890acd49e2b972eb6ea63c9f8409105f"}, -] - -[package.dependencies] -certifi = "*" -ibm-cos-sdk = ">=2.12.0,<2.14.0" -importlib-metadata = "*" -lomond = "*" -packaging = "*" -pandas = ">=0.24.2,<2.2.0" -requests = "*" -tabulate = "*" -urllib3 = "*" - -[package.extras] -fl-crypto = ["pyhelayers (==1.5.0.3)"] -fl-crypto-rt24-1 = ["pyhelayers (==1.5.3.1)"] -fl-rt23-1-py3-10 = ["GPUtil", "cryptography (==42.0.5)", "ddsketch (==2.0.4)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "lz4", "msgpack (==1.0.7)", "msgpack-numpy (==0.4.8)", 
"numcompress (==0.1.2)", "numpy (==1.23.5)", "pandas (==1.5.3)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==4.22.1)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.32.3)", "scikit-learn (==1.1.1)", "scipy (==1.10.1)", "setproctitle", "skops (==0.9.0)", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.12.0)", "torch (==2.0.1)", "websockets (==10.1)"] -fl-rt24-1-py3-11 = ["GPUtil", "cryptography (==42.0.5)", "ddsketch (==2.0.4)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.3.2)", "lz4", "msgpack (==1.0.7)", "msgpack-numpy (==0.4.8)", "numcompress (==0.1.2)", "numpy (==1.26.4)", "pandas (==2.1.4)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==4.22.1)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.32.3)", "scikit-learn (==1.3.2)", "scipy (==1.11.4)", "setproctitle", "skops (==0.9.0)", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.14.1)", "torch (==2.1.2)", "websockets (==10.1)"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "8.0.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "jmespath" -version = "1.0.1" -description = "JSON Matching Expressions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] - -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = 
"sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - -[[package]] -name = "langchain-core" -version = "0.2.17" -description = "Building applications with LLMs through composability" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = true - -[package.dependencies] -jsonpatch = "^1.33" -langsmith = "^0.1.75" -packaging = ">=23.2,<25" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] -PyYAML = ">=5.3" -tenacity = "^8.1.0,!=8.4.0" - -[package.source] -type = "directory" -url = "../../core" - -[[package]] -name = "langsmith" -version = "0.1.83" -description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langsmith-0.1.83-py3-none-any.whl", hash = "sha256:f54d8cd8479b648b6339f3f735d19292c3516d080f680933ecdca3eab4b67ed3"}, - {file = "langsmith-0.1.83.tar.gz", hash = "sha256:5cdd947212c8ad19adb992c06471c860185a777daa6859bb47150f90daf64bf3"}, -] - -[package.dependencies] -orjson = ">=3.9.14,<4.0.0" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] -requests = ">=2,<3" - -[[package]] -name = "lomond" -version = "0.3.3" -description = "Websocket Client Library" -optional = false -python-versions = "*" -files = [ - {file = "lomond-0.3.3-py2.py3-none-any.whl", hash = "sha256:df1dd4dd7b802a12b71907ab1abb08b8ce9950195311207579379eb3b1553de7"}, - {file = "lomond-0.3.3.tar.gz", hash = "sha256:427936596b144b4ec387ead99aac1560b77c8a78107d3d49415d3abbe79acbd3"}, -] - -[package.dependencies] -six = ">=1.10.0" - -[[package]] -name = "mypy" -version = "1.10.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "orjson" -version = "3.10.6" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, - {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, - {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, - {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, - {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, - {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, - {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, - {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, - {file 
= "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, - {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, - {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, - {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, - {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, - {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, - {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, - {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" 
-files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandas" -version = "2.1.4" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, - {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, - {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.1" - -[package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pydantic" -version = "2.8.2" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash 
= "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.20.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = 
"pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.21.2" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, - {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, -] - -[package.dependencies] -pytest = ">=7.0.0" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "pytest-watcher" -version = "0.3.5" -description = "Automatically rerun your tests on file modifications" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, - {file = 
"pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, -] - -[package.dependencies] -tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} -watchdog = ">=2.0.0" - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = 
"sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "ruff" -version = "0.5.0" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.5.0-py3-none-linux_armv6l.whl", hash = "sha256:ee770ea8ab38918f34e7560a597cc0a8c9a193aaa01bfbd879ef43cb06bd9c4c"}, - {file = "ruff-0.5.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38f3b8327b3cb43474559d435f5fa65dacf723351c159ed0dc567f7ab735d1b6"}, - {file = "ruff-0.5.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7594f8df5404a5c5c8f64b8311169879f6cf42142da644c7e0ba3c3f14130370"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adc7012d6ec85032bc4e9065110df205752d64010bed5f958d25dbee9ce35de3"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d505fb93b0fabef974b168d9b27c3960714d2ecda24b6ffa6a87ac432905ea38"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dc5cfd3558f14513ed0d5b70ce531e28ea81a8a3b1b07f0f48421a3d9e7d80a"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:db3ca35265de239a1176d56a464b51557fce41095c37d6c406e658cf80bbb362"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1a321c4f68809fddd9b282fab6a8d8db796b270fff44722589a8b946925a2a8"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c4dfcd8d34b143916994b3876b63d53f56724c03f8c1a33a253b7b1e6bf2a7d"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81e5facfc9f4a674c6a78c64d38becfbd5e4f739c31fcd9ce44c849f1fad9e4c"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e589e27971c2a3efff3fadafb16e5aef7ff93250f0134ec4b52052b673cf988d"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2ffbc3715a52b037bcb0f6ff524a9367f642cdc5817944f6af5479bbb2eb50e"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cd096e23c6a4f9c819525a437fa0a99d1c67a1b6bb30948d46f33afbc53596cf"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:46e193b36f2255729ad34a49c9a997d506e58f08555366b2108783b3064a0e1e"}, - {file = "ruff-0.5.0-py3-none-win32.whl", hash = "sha256:49141d267100f5ceff541b4e06552e98527870eafa1acc9dec9139c9ec5af64c"}, - {file = "ruff-0.5.0-py3-none-win_amd64.whl", hash = "sha256:e9118f60091047444c1b90952736ee7b1792910cab56e9b9a9ac20af94cd0440"}, - {file = "ruff-0.5.0-py3-none-win_arm64.whl", hash = "sha256:ed5c4df5c1fb4518abcb57725b576659542bdbe93366f4f329e8f398c4b71178"}, - {file = "ruff-0.5.0.tar.gz", hash = "sha256:eb641b5873492cf9bd45bc9c5ae5320648218e04386a5f0c264ad6ccce8226a1"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "syrupy" -version = "4.6.1" -description = "Pytest Snapshot Test Utility" -optional = false -python-versions = ">=3.8.1,<4" -files = [ - {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, - {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, -] - -[package.dependencies] -pytest = 
">=7.0.0,<9.0.0" - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "tenacity" -version = "8.4.2" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"}, - {file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "types-requests" -version = "2.32.0.20240622" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-requests-2.32.0.20240622.tar.gz", hash = "sha256:ed5e8a412fcc39159d6319385c009d642845f250c63902718f605cd90faade31"}, - {file = "types_requests-2.32.0.20240622-py3-none-any.whl", hash = "sha256:97bac6b54b5bd4cf91d407e62f0932a74821bc2211f22116d9ee1dd643826caf"}, -] - -[package.dependencies] -urllib3 = ">=2" - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "urllib3" -version = "2.1.0" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "watchdog" -version = "4.0.1" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "zipp" -version = "3.19.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.10,<4.0" -content-hash = "d58e08764f9c09447d239fc2b7926f48e0e167d99eb30f559acad0628de8fe0b" diff --git a/libs/partners/ibm/pyproject.toml b/libs/partners/ibm/pyproject.toml deleted file mode 100644 index 2a3f0904d39..00000000000 --- a/libs/partners/ibm/pyproject.toml +++ /dev/null @@ -1,81 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -name = "langchain-ibm" -version = "0.1.10" -description = "An integration package connecting IBM watsonx.ai and LangChain" -authors = [ "IBM",] -readme = "README.md" -repository = "https://github.com/langchain-ai/langchain" -license = "MIT" - -[tool.mypy] -disallow_untyped_defs = "True" - -[tool.poetry.urls] -"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/ibm" - -[tool.poetry.dependencies] 
-python = ">=3.10,<4.0" -langchain-core = ">=0.2.17,<0.3" -ibm-watsonx-ai = "^1.0.8" - -[tool.ruff.lint] -select = [ "E", "F", "I",] - -[tool.coverage.run] -omit = [ "tests/*",] - -[tool.pytest.ini_options] -addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5" -markers = [ "requires: mark tests as requiring a specific library", "asyncio: mark tests as requiring asyncio", "compile: mark placeholder test used to compile integration tests without running them", "scheduled: mark tests to run in scheduled testing",] -asyncio_mode = "auto" - -[tool.poetry.group.test] -optional = true - -[tool.poetry.group.codespell] -optional = true - -[tool.poetry.group.test_integration] -optional = true - -[tool.poetry.group.lint] -optional = true - -[tool.poetry.group.dev] -optional = true - -[tool.poetry.group.test.dependencies] -pytest = "^7.3.0" -freezegun = "^1.2.2" -pytest-mock = "^3.10.0" -syrupy = "^4.0.2" -pytest-watcher = "^0.3.4" -pytest-asyncio = "^0.21.1" - -[tool.poetry.group.codespell.dependencies] -codespell = "^2.2.0" - -[tool.poetry.group.test_integration.dependencies] - -[tool.poetry.group.lint.dependencies] -ruff = "^0.5" - -[tool.poetry.group.typing.dependencies] -mypy = "^1.10" -types-requests = "^2" - -[tool.poetry.group.test.dependencies.langchain-core] -path = "../../core" -develop = true - -[tool.poetry.group.dev.dependencies.langchain-core] -path = "../../core" -develop = true - -[tool.poetry.group.typing.dependencies.langchain-core] -path = "../../core" -develop = true diff --git a/libs/partners/ibm/scripts/check_imports.py b/libs/partners/ibm/scripts/check_imports.py deleted file mode 100644 index fd21a4975b7..00000000000 --- a/libs/partners/ibm/scripts/check_imports.py +++ /dev/null @@ -1,17 +0,0 @@ -import sys -import traceback -from importlib.machinery import SourceFileLoader - -if __name__ == "__main__": - files = sys.argv[1:] - has_failure = False - for file in files: - try: - SourceFileLoader("x", file).load_module() - except Exception: - has_faillure = True - print(file) - traceback.print_exc() - print() - - sys.exit(1 if has_failure else 0) diff --git a/libs/partners/ibm/scripts/check_pydantic.sh b/libs/partners/ibm/scripts/check_pydantic.sh deleted file mode 100755 index 06b5bb81ae2..00000000000 --- a/libs/partners/ibm/scripts/check_pydantic.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# This script searches for lines starting with "import pydantic" or "from pydantic" -# in tracked files within a Git repository. -# -# Usage: ./scripts/check_pydantic.sh /path/to/repository - -# Check if a path argument is provided -if [ $# -ne 1 ]; then - echo "Usage: $0 /path/to/repository" - exit 1 -fi - -repository_path="$1" - -# Search for lines matching the pattern within the specified repository -result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic') - -# Check if any matching lines were found -if [ -n "$result" ]; then - echo "ERROR: The following lines need to be updated:" - echo "$result" - echo "Please replace the code with an import from langchain_core.pydantic_v1." 
- echo "For example, replace 'from pydantic import BaseModel'" - echo "with 'from langchain_core.pydantic_v1 import BaseModel'" - exit 1 -fi diff --git a/libs/partners/ibm/scripts/lint_imports.sh b/libs/partners/ibm/scripts/lint_imports.sh deleted file mode 100755 index 695613c7ba8..00000000000 --- a/libs/partners/ibm/scripts/lint_imports.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -set -eu - -# Initialize a variable to keep track of errors -errors=0 - -# make sure not importing from langchain or langchain_experimental -git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) -git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1)) - -# Decide on an exit status based on the errors -if [ "$errors" -gt 0 ]; then - exit 1 -else - exit 0 -fi diff --git a/libs/partners/ibm/tests/__init__.py b/libs/partners/ibm/tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/libs/partners/ibm/tests/integration_tests/__init__.py b/libs/partners/ibm/tests/integration_tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/libs/partners/ibm/tests/integration_tests/test_chat_models.py b/libs/partners/ibm/tests/integration_tests/test_chat_models.py deleted file mode 100644 index 440472fb201..00000000000 --- a/libs/partners/ibm/tests/integration_tests/test_chat_models.py +++ /dev/null @@ -1,243 +0,0 @@ -import json -import os - -from ibm_watsonx_ai.metanames import GenTextParamsMetaNames # type: ignore -from langchain_core.messages import ( - AIMessage, - BaseMessage, - HumanMessage, - SystemMessage, -) -from langchain_core.prompts import ChatPromptTemplate -from langchain_core.pydantic_v1 import BaseModel - -from langchain_ibm import ChatWatsonx - -WX_APIKEY = os.environ.get("WATSONX_APIKEY", "") -WX_PROJECT_ID = os.environ.get("WATSONX_PROJECT_ID", "") - -URL = "https://us-south.ml.cloud.ibm.com" -MODEL_ID = "mistralai/mixtral-8x7b-instruct-v01" - - -def test_01_generate_chat() -> None: - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - messages = [ - ("system", "You are a helpful assistant that translates English to French."), - ( - "human", - "Translate this sentence from English to French. I love programming.", - ), - ] - response = chat.invoke(messages) - assert response - - -def test_01a_generate_chat_with_invoke_params() -> None: - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - - params = { - GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - } - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - messages = [ - ("system", "You are a helpful assistant that translates English to French."), - ( - "human", - "Translate this sentence from English to French. 
I love programming.", - ), - ] - response = chat.invoke(messages, params=params) - assert response - - -def test_01b_generate_chat_with_invoke_params() -> None: - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - - params_1 = { - GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - } - params_2 = { - GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - } - chat = ChatWatsonx( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=params_1, # type: ignore[arg-type] - ) - messages = [ - ("system", "You are a helpful assistant that translates English to French."), - ( - "human", - "Translate this sentence from English to French. I love programming.", - ), - ] - response = chat.invoke(messages, params=params_2) - assert response - - -def test_02_generate_chat_with_few_inputs() -> None: - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - message = HumanMessage(content="Hello") - response = chat.generate([[message], [message]]) - assert response - - -def test_03_generate_chat_with_few_various_inputs() -> None: - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - system_message = SystemMessage(content="You are to chat with the user.") - human_message = HumanMessage(content="Hello") - response = chat.invoke([system_message, human_message]) - assert isinstance(response, BaseMessage) - assert isinstance(response.content, str) - - -def test_05_generate_chat_with_stream() -> None: - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - response = chat.stream("What's the weather in san francisco") - for chunk in response: - assert isinstance(chunk.content, str) - - -def test_05a_generate_chat_with_stream_with_param() -> None: - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - - params = { - GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - } - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - response = chat.stream("What's the weather in san francisco", params=params) - for chunk in response: - assert isinstance(chunk.content, str) - - -def test_10_chaining() -> None: - chat = ChatWatsonx(model_id=MODEL_ID, url=URL, project_id=WX_PROJECT_ID) # type: ignore[arg-type] - prompt = ChatPromptTemplate.from_messages( - [ - ( - "system", - "You are a helpful assistant that " - "translates {input_language} to {output_language}.", - ), - ("human", "{input}"), - ] - ) - chain = prompt | chat - - response = chain.invoke( - { - "input_language": "English", - "output_language": "German", - "input": "I love programming.", - } - ) - assert response - - -def test_11_chaining_with_params() -> None: - parameters = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MIN_NEW_TOKENS: 5, - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - } - chat = ChatWatsonx( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=parameters, # type: ignore[arg-type] - ) - prompt = ChatPromptTemplate.from_messages( - [ - ( - "system", - "You are a helpful assistant that translates " - "{input_language} to {output_language}.", - ), - ("human", "{input}"), - ] - ) - chain = prompt | chat - - response = chain.invoke( - { - "input_language": "English", - "output_language": "German", - "input": "I love programming.", - } 
- ) - assert response - - -def test_20_tool_choice() -> None: - """Test that tool choice is respected.""" - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - - params = {GenTextParamsMetaNames.MAX_NEW_TOKENS: 500} - chat = ChatWatsonx( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=params, # type: ignore[arg-type] - ) - - class MyTool(BaseModel): - name: str - age: int - - with_tool = chat.bind_tools([MyTool], tool_choice="MyTool") - - resp = with_tool.invoke("Who was the 27 year old named Erick?") - assert isinstance(resp, AIMessage) - assert resp.content == "" # should just be tool call - tool_calls = resp.additional_kwargs["tool_calls"] - assert len(tool_calls) == 1 - tool_call = tool_calls[0] - assert tool_call["function"]["name"] == "MyTool" - assert json.loads(tool_call["function"]["arguments"]) == { - "age": 27, - "name": "Erick", - } - assert tool_call["type"] == "function" - assert isinstance(resp.tool_calls, list) - assert len(resp.tool_calls) == 1 - tool_call = resp.tool_calls[0] - assert tool_call["name"] == "MyTool" - assert tool_call["args"] == {"age": 27, "name": "Erick"} - - -def test_21_tool_choice_bool() -> None: - """Test that tool choice is respected just passing in True.""" - from ibm_watsonx_ai.metanames import GenTextParamsMetaNames - - params = {GenTextParamsMetaNames.MAX_NEW_TOKENS: 500} - chat = ChatWatsonx( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=params, # type: ignore[arg-type] - ) - - class MyTool(BaseModel): - name: str - age: int - - with_tool = chat.bind_tools([MyTool], tool_choice=True) - - resp = with_tool.invoke("Who was the 27 year old named Erick?") - assert isinstance(resp, AIMessage) - assert resp.content == "" # should just be tool call - tool_calls = resp.additional_kwargs["tool_calls"] - assert len(tool_calls) == 1 - tool_call = tool_calls[0] - assert tool_call["function"]["name"] == "MyTool" - assert json.loads(tool_call["function"]["arguments"]) == { - "age": 27, - "name": "Erick", - } - assert tool_call["type"] == "function" diff --git a/libs/partners/ibm/tests/integration_tests/test_compile.py b/libs/partners/ibm/tests/integration_tests/test_compile.py deleted file mode 100644 index 5196f4c0f8c..00000000000 --- a/libs/partners/ibm/tests/integration_tests/test_compile.py +++ /dev/null @@ -1,7 +0,0 @@ -import pytest # type: ignore[import-not-found] - - -@pytest.mark.compile -def test_placeholder() -> None: - """Used for compiling integration tests without running any real tests.""" - pass diff --git a/libs/partners/ibm/tests/integration_tests/test_embeddings.py b/libs/partners/ibm/tests/integration_tests/test_embeddings.py deleted file mode 100644 index 6007467eb2a..00000000000 --- a/libs/partners/ibm/tests/integration_tests/test_embeddings.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Test WatsonxEmbeddings. - -You'll need to set WATSONX_APIKEY and WATSONX_PROJECT_ID environment variables. 
-""" - -import os - -from ibm_watsonx_ai import APIClient # type: ignore -from ibm_watsonx_ai.metanames import EmbedTextParamsMetaNames # type: ignore - -from langchain_ibm import WatsonxEmbeddings - -WX_APIKEY = os.environ.get("WATSONX_APIKEY", "") -WX_PROJECT_ID = os.environ.get("WATSONX_PROJECT_ID", "") - -URL = "https://us-south.ml.cloud.ibm.com" -MODEL_ID = "ibm/slate-125m-english-rtrvr" - -DOCUMENTS = ["What is a generative ai?", "What is a loan and how does it works?"] - - -def test_01_generate_embed_documents() -> None: - watsonx_embedding = WatsonxEmbeddings( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, # type: ignore[arg-type] - ) - generate_embedding = watsonx_embedding.embed_documents(texts=DOCUMENTS) - assert len(generate_embedding) == len(DOCUMENTS) - assert all(isinstance(el, float) for el in generate_embedding[0]) - - -def test_02_generate_embed_query() -> None: - watsonx_embedding = WatsonxEmbeddings( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - ) - generate_embedding = watsonx_embedding.embed_query(text=DOCUMENTS[0]) - assert isinstance(generate_embedding, list) and isinstance( - generate_embedding[0], float - ) - - -def test_03_generate_embed_documents_with_param() -> None: - embed_params = { - EmbedTextParamsMetaNames.TRUNCATE_INPUT_TOKENS: 3, - } - watsonx_embedding = WatsonxEmbeddings( - model_id=MODEL_ID, - url=URL, # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=embed_params, # type: ignore[arg-type] - ) - generate_embedding = watsonx_embedding.embed_documents(texts=DOCUMENTS) - assert len(generate_embedding) == len(DOCUMENTS) - assert all(isinstance(el, float) for el in generate_embedding[0]) - - -def test_10_generate_embed_query_with_client_initialization() -> None: - watsonx_client = APIClient( - credentials={ - "url": URL, - "apikey": WX_APIKEY, - } - ) - watsonx_embedding = WatsonxEmbeddings( - model_id=MODEL_ID, project_id=WX_PROJECT_ID, watsonx_client=watsonx_client - ) - generate_embedding = watsonx_embedding.embed_query(text=DOCUMENTS[0]) - assert isinstance(generate_embedding, list) and isinstance( - generate_embedding[0], float - ) diff --git a/libs/partners/ibm/tests/integration_tests/test_llms.py b/libs/partners/ibm/tests/integration_tests/test_llms.py deleted file mode 100644 index cf0a39e9971..00000000000 --- a/libs/partners/ibm/tests/integration_tests/test_llms.py +++ /dev/null @@ -1,292 +0,0 @@ -"""Test WatsonxLLM API wrapper. - -You'll need to set WATSONX_APIKEY and WATSONX_PROJECT_ID environment variables. 
-""" - -import os - -from ibm_watsonx_ai import Credentials # type: ignore -from ibm_watsonx_ai.foundation_models import Model, ModelInference # type: ignore -from ibm_watsonx_ai.foundation_models.utils.enums import ( # type: ignore - DecodingMethods, - ModelTypes, -) -from ibm_watsonx_ai.metanames import GenTextParamsMetaNames # type: ignore -from langchain_core.outputs import LLMResult - -from langchain_ibm import WatsonxLLM - -WX_APIKEY = os.environ.get("WATSONX_APIKEY", "") -WX_PROJECT_ID = os.environ.get("WATSONX_PROJECT_ID", "") -MODEL_ID = "google/flan-ul2" - - -def test_watsonxllm_invoke() -> None: - watsonxllm = WatsonxLLM( - model_id=MODEL_ID, - url="https://us-south.ml.cloud.ibm.com", # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - ) - response = watsonxllm.invoke("What color sunflower is?") - print(f"\nResponse: {response}") - assert isinstance(response, str) - assert len(response) > 0 - - -def test_watsonxllm_invoke_with_params() -> None: - parameters = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - GenTextParamsMetaNames.MIN_NEW_TOKENS: 5, - } - - watsonxllm = WatsonxLLM( - model_id=MODEL_ID, - url="https://us-south.ml.cloud.ibm.com", # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=parameters, - ) - response = watsonxllm.invoke("What color sunflower is?") - print(f"\nResponse: {response}") - assert isinstance(response, str) - assert len(response) > 0 - - -def test_watsonxllm_invoke_with_params_2() -> None: - parameters = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - GenTextParamsMetaNames.MIN_NEW_TOKENS: 5, - } - - watsonxllm = WatsonxLLM( - model_id=MODEL_ID, - url="https://us-south.ml.cloud.ibm.com", # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - ) - response = watsonxllm.invoke("What color sunflower is?", params=parameters) - print(f"\nResponse: {response}") - assert isinstance(response, str) - assert len(response) > 0 - - -def test_watsonxllm_invoke_with_params_3() -> None: - parameters_1 = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - } - parameters_2 = { - GenTextParamsMetaNames.MIN_NEW_TOKENS: 5, - } - - watsonxllm = WatsonxLLM( - model_id=MODEL_ID, - url="https://us-south.ml.cloud.ibm.com", # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - params=parameters_1, - ) - response = watsonxllm.invoke("What color sunflower is?", params=parameters_2) - print(f"\nResponse: {response}") - assert isinstance(response, str) - assert len(response) > 0 - - -def test_watsonxllm_generate() -> None: - watsonxllm = WatsonxLLM( - model_id=MODEL_ID, - url="https://us-south.ml.cloud.ibm.com", # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - ) - response = watsonxllm.generate(["What color sunflower is?"]) - print(f"\nResponse: {response}") - response_text = response.generations[0][0].text - print(f"Response text: {response_text}") - assert isinstance(response, LLMResult) - assert len(response_text) > 0 - - -def test_watsonxllm_generate_with_param() -> None: - parameters = { - GenTextParamsMetaNames.DECODING_METHOD: "sample", - GenTextParamsMetaNames.MAX_NEW_TOKENS: 10, - GenTextParamsMetaNames.MIN_NEW_TOKENS: 5, - } - watsonxllm = WatsonxLLM( - model_id=MODEL_ID, - url="https://us-south.ml.cloud.ibm.com", # type: ignore[arg-type] - project_id=WX_PROJECT_ID, - ) - response = watsonxllm.generate(["What color sunflower is?"], params=parameters) - print(f"\nResponse: {response}") - 
-    response_text = response.generations[0][0].text
-    print(f"Response text: {response_text}")
-    assert isinstance(response, LLMResult)
-    assert len(response_text) > 0
-
-
-def test_watsonxllm_generate_with_multiple_prompts() -> None:
-    watsonxllm = WatsonxLLM(
-        model_id=MODEL_ID,
-        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
-        project_id=WX_PROJECT_ID,
-    )
-    response = watsonxllm.generate(
-        ["What color sunflower is?", "What color turtle is?"]
-    )
-    print(f"\nResponse: {response}")
-    response_text = response.generations[0][0].text
-    print(f"Response text: {response_text}")
-    assert isinstance(response, LLMResult)
-    assert len(response_text) > 0
-
-
-def test_watsonxllm_generate_stream() -> None:
-    watsonxllm = WatsonxLLM(
-        model_id=MODEL_ID,
-        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
-        project_id=WX_PROJECT_ID,
-    )
-    response = watsonxllm.generate(["What color sunflower is?"], stream=True)
-    print(f"\nResponse: {response}")
-    response_text = response.generations[0][0].text
-    print(f"Response text: {response_text}")
-    assert isinstance(response, LLMResult)
-    assert len(response_text) > 0
-
-
-def test_watsonxllm_stream() -> None:
-    watsonxllm = WatsonxLLM(
-        model_id=MODEL_ID,
-        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
-        project_id=WX_PROJECT_ID,
-    )
-    response = watsonxllm.invoke("What color sunflower is?")
-    print(f"\nResponse: {response}")
-
-    stream_response = watsonxllm.stream("What color sunflower is?")
-
-    linked_text_stream = ""
-    for chunk in stream_response:
-        assert isinstance(
-            chunk, str
-        ), f"chunk expected type '{str}', actual '{type(chunk)}'"
-        linked_text_stream += chunk
-    print(f"Linked text stream: {linked_text_stream}")
-    assert (
-        response == linked_text_stream
-    ), "Linked text stream is not the same as generated text"
-
-
-def test_watsonxllm_invoke_from_wx_model() -> None:
-    model = Model(
-        model_id=MODEL_ID,
-        credentials={
-            "apikey": WX_APIKEY,
-            "url": "https://us-south.ml.cloud.ibm.com",
-        },
-        project_id=WX_PROJECT_ID,
-    )
-    watsonxllm = WatsonxLLM(watsonx_model=model)
-    response = watsonxllm.invoke("What color sunflower is?")
-    print(f"\nResponse: {response}")
-    assert isinstance(response, str)
-    assert len(response) > 0
-
-
-def test_watsonxllm_invoke_from_wx_model_inference() -> None:
-    credentials = Credentials(
-        api_key=WX_APIKEY, url="https://us-south.ml.cloud.ibm.com"
-    )
-    model = ModelInference(
-        model_id=MODEL_ID,
-        credentials=credentials,
-        project_id=WX_PROJECT_ID,
-    )
-    watsonxllm = WatsonxLLM(watsonx_model=model)
-    response = watsonxllm.invoke("What color sunflower is?")
-    print(f"\nResponse: {response}")
-    assert isinstance(response, str)
-    assert len(response) > 0
-
-
-def test_watsonxllm_invoke_from_wx_model_inference_with_params() -> None:
-    parameters = {
-        GenTextParamsMetaNames.DECODING_METHOD: "sample",
-        GenTextParamsMetaNames.MAX_NEW_TOKENS: 100,
-        GenTextParamsMetaNames.MIN_NEW_TOKENS: 10,
-        GenTextParamsMetaNames.TEMPERATURE: 0.5,
-        GenTextParamsMetaNames.TOP_K: 50,
-        GenTextParamsMetaNames.TOP_P: 1,
-    }
-    model = ModelInference(
-        model_id=MODEL_ID,
-        credentials={
-            "apikey": WX_APIKEY,
-            "url": "https://us-south.ml.cloud.ibm.com",
-        },
-        project_id=WX_PROJECT_ID,
-        params=parameters,
-    )
-    watsonxllm = WatsonxLLM(watsonx_model=model)
-    response = watsonxllm.invoke("What color sunflower is?")
-    print(f"\nResponse: {response}")
-    assert isinstance(response, str)
-    assert len(response) > 0
-
-
-def test_watsonxllm_invoke_from_wx_model_inference_with_params_as_enum() -> None:
-    parameters = {
-        GenTextParamsMetaNames.DECODING_METHOD: DecodingMethods.GREEDY,
-        GenTextParamsMetaNames.MAX_NEW_TOKENS: 100,
-        GenTextParamsMetaNames.MIN_NEW_TOKENS: 10,
-        GenTextParamsMetaNames.TEMPERATURE: 0.5,
-        GenTextParamsMetaNames.TOP_K: 50,
-        GenTextParamsMetaNames.TOP_P: 1,
-    }
-    model = ModelInference(
-        model_id=ModelTypes.FLAN_UL2,
-        credentials={
-            "apikey": WX_APIKEY,
-            "url": "https://us-south.ml.cloud.ibm.com",
-        },
-        project_id=WX_PROJECT_ID,
-        params=parameters,
-    )
-    watsonxllm = WatsonxLLM(watsonx_model=model)
-    response = watsonxllm.invoke("What color sunflower is?")
-    print(f"\nResponse: {response}")
-    assert isinstance(response, str)
-    assert len(response) > 0
-
-
-async def test_watsonx_ainvoke() -> None:
-    watsonxllm = WatsonxLLM(
-        model_id=MODEL_ID,
-        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
-        project_id=WX_PROJECT_ID,
-    )
-    response = await watsonxllm.ainvoke("What color sunflower is?")
-    assert isinstance(response, str)
-
-
-async def test_watsonx_agenerate() -> None:
-    watsonxllm = WatsonxLLM(
-        model_id=MODEL_ID,
-        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
-        project_id=WX_PROJECT_ID,
-    )
-    response = await watsonxllm.agenerate(
-        ["What color sunflower is?", "What color turtle is?"]
-    )
-    assert len(response.generations) > 0
-    assert response.llm_output["token_usage"]["generated_token_count"] != 0  # type: ignore
-
-
-def test_get_num_tokens() -> None:
-    watsonxllm = WatsonxLLM(
-        model_id=MODEL_ID,
-        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
-        project_id=WX_PROJECT_ID,
-    )
-    num_tokens = watsonxllm.get_num_tokens("What color sunflower is?")
-    assert num_tokens > 0
diff --git a/libs/partners/ibm/tests/unit_tests/__init__.py b/libs/partners/ibm/tests/unit_tests/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/libs/partners/ibm/tests/unit_tests/test_chat_models.py b/libs/partners/ibm/tests/unit_tests/test_chat_models.py
deleted file mode 100644
index 54256712a99..00000000000
--- a/libs/partners/ibm/tests/unit_tests/test_chat_models.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""Test ChatWatsonx API wrapper."""
-
-import os
-
-from langchain_ibm import ChatWatsonx
-
-os.environ.pop("WATSONX_APIKEY", None)
-os.environ.pop("WATSONX_PROJECT_ID", None)
-
-MODEL_ID = "mistralai/mixtral-8x7b-instruct-v01"
-
-
-def test_initialize_chat_watsonx_bad_path_without_url() -> None:
-    try:
-        ChatWatsonx(
-            model_id=MODEL_ID,
-        )
-    except ValueError as e:
-        assert "WATSONX_URL" in e.__str__()
-
-
-def test_initialize_chat_watsonx_cloud_bad_path() -> None:
-    try:
-        ChatWatsonx(model_id=MODEL_ID, url="https://us-south.ml.cloud.ibm.com")  # type: ignore[arg-type]
-    except ValueError as e:
-        assert "WATSONX_APIKEY" in e.__str__()
-
-
-def test_initialize_chat_watsonx_cpd_bad_path_without_all() -> None:
-    try:
-        ChatWatsonx(
-            model_id=MODEL_ID,
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert (
-            "WATSONX_APIKEY" in e.__str__()
-            and "WATSONX_PASSWORD" in e.__str__()
-            and "WATSONX_TOKEN" in e.__str__()
-        )
-
-
-def test_initialize_chat_watsonx_cpd_bad_path_password_without_username() -> None:
-    try:
-        ChatWatsonx(
-            model_id=MODEL_ID,
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-            password="test_password",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert "WATSONX_USERNAME" in e.__str__()
-
-
-def test_initialize_chat_watsonx_cpd_bad_path_apikey_without_username() -> None:
-    try:
-        ChatWatsonx(
-            model_id=MODEL_ID,
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-            apikey="test_apikey",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert "WATSONX_USERNAME" in e.__str__()
diff --git a/libs/partners/ibm/tests/unit_tests/test_embeddings.py b/libs/partners/ibm/tests/unit_tests/test_embeddings.py
deleted file mode 100644
index ceb0a82965a..00000000000
--- a/libs/partners/ibm/tests/unit_tests/test_embeddings.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""Test WatsonxEmbeddings API wrapper."""
-
-import os
-
-from langchain_ibm import WatsonxEmbeddings
-
-os.environ.pop("WATSONX_APIKEY", None)
-os.environ.pop("WATSONX_PROJECT_ID", None)
-
-MODEL_ID = "ibm/slate-125m-english-rtrvr"
-
-
-def test_initialize_watsonx_embeddings_bad_path_without_url() -> None:
-    try:
-        WatsonxEmbeddings(
-            model_id=MODEL_ID,
-        )
-    except ValueError as e:
-        assert "WATSONX_URL" in e.__str__()
-
-
-def test_initialize_watsonx_embeddings_cloud_bad_path() -> None:
-    try:
-        WatsonxEmbeddings(model_id=MODEL_ID, url="https://us-south.ml.cloud.ibm.com")  # type: ignore[arg-type]
-    except ValueError as e:
-        assert "WATSONX_APIKEY" in e.__str__()
-
-
-def test_initialize_watsonx_embeddings_cpd_bad_path_without_all() -> None:
-    try:
-        WatsonxEmbeddings(
-            model_id=MODEL_ID,
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert (
-            "WATSONX_APIKEY" in e.__str__()
-            and "WATSONX_PASSWORD" in e.__str__()
-            and "WATSONX_TOKEN" in e.__str__()
-        )
-
-
-def test_initialize_watsonx_embeddings_cpd_bad_path_password_without_username() -> None:
-    try:
-        WatsonxEmbeddings(
-            model_id=MODEL_ID,
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-            password="test_password",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert "WATSONX_USERNAME" in e.__str__()
-
-
-def test_initialize_watsonx_embeddings_cpd_bad_path_apikey_without_username() -> None:
-    try:
-        WatsonxEmbeddings(
-            model_id=MODEL_ID,
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-            apikey="test_apikey",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert "WATSONX_USERNAME" in e.__str__()
diff --git a/libs/partners/ibm/tests/unit_tests/test_imports.py b/libs/partners/ibm/tests/unit_tests/test_imports.py
deleted file mode 100644
index 7bdeb3246c0..00000000000
--- a/libs/partners/ibm/tests/unit_tests/test_imports.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from langchain_ibm import __all__
-
-EXPECTED_ALL = ["WatsonxLLM", "WatsonxEmbeddings", "ChatWatsonx"]
-
-
-def test_all_imports() -> None:
-    assert sorted(EXPECTED_ALL) == sorted(__all__)
diff --git a/libs/partners/ibm/tests/unit_tests/test_llms.py b/libs/partners/ibm/tests/unit_tests/test_llms.py
deleted file mode 100644
index 5831ff22ac3..00000000000
--- a/libs/partners/ibm/tests/unit_tests/test_llms.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""Test WatsonxLLM API wrapper."""
-
-import os
-
-from langchain_ibm import WatsonxLLM
-
-os.environ.pop("WATSONX_APIKEY", None)
-os.environ.pop("WATSONX_PROJECT_ID", None)
-
-
-def test_initialize_watsonxllm_bad_path_without_url() -> None:
-    try:
-        WatsonxLLM(
-            model_id="google/flan-ul2",
-        )
-    except ValueError as e:
-        assert "WATSONX_URL" in e.__str__()
-
-
-def test_initialize_watsonxllm_cloud_bad_path() -> None:
-    try:
-        WatsonxLLM(model_id="google/flan-ul2", url="https://us-south.ml.cloud.ibm.com")  # type: ignore[arg-type]
-    except ValueError as e:
-        assert "WATSONX_APIKEY" in e.__str__()
-
-
-def test_initialize_watsonxllm_cpd_bad_path_without_all() -> None:
-    try:
-        WatsonxLLM(
-            model_id="google/flan-ul2",
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert (
-            "WATSONX_APIKEY" in e.__str__()
-            and "WATSONX_PASSWORD" in e.__str__()
-            and "WATSONX_TOKEN" in e.__str__()
-        )
-
-
-def test_initialize_watsonxllm_cpd_bad_path_password_without_username() -> None:
-    try:
-        WatsonxLLM(
-            model_id="google/flan-ul2",
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-            password="test_password",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert "WATSONX_USERNAME" in e.__str__()
-
-
-def test_initialize_watsonxllm_cpd_bad_path_apikey_without_username() -> None:
-    try:
-        WatsonxLLM(
-            model_id="google/flan-ul2",
-            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",  # type: ignore[arg-type]
-            apikey="test_apikey",  # type: ignore[arg-type]
-        )
-    except ValueError as e:
-        assert "WATSONX_USERNAME" in e.__str__()