community[patch]: docstrings update (#20301)

Added missing docstrings. Formatted docstrings to a consistent form.
Leonid Ganeline 2024-04-11 13:23:27 -07:00 committed by GitHub
parent 2900720cd3
commit 7cf2d2759d
77 changed files with 162 additions and 125 deletions

@@ -9,7 +9,7 @@ from langchain_community.tools.connery import ConneryService
 class ConneryToolkit(BaseToolkit):
     """
-    A LangChain Toolkit with a list of Connery Actions as tools.
+    Toolkit with a list of Connery Actions as tools.
     """
     tools: List[BaseTool]

@@ -348,7 +348,7 @@ def create_openapi_agent(
     allow_dangerous_requests: bool = False,
     **kwargs: Any,
 ) -> Any:
-    """Instantiate OpenAI API planner and controller for a given spec.
+    """Construct an OpenAI API planner and controller for a given spec.
     Inject credentials via requests_wrapper.

@@ -91,7 +91,7 @@ def analyze_text(
 class FlyteCallbackHandler(BaseMetadataCallbackHandler, BaseCallbackHandler):
-    """This callback handler that is used within a Flyte task."""
+    """Callback handler that is used within a Flyte task."""
     def __init__(self) -> None:
         """Initialize callback handler."""

@@ -9,14 +9,14 @@ if TYPE_CHECKING:
 class ChildType(Enum):
-    """The enumerator of the child type."""
+    """Enumerator of the child type."""
     MARKDOWN = "MARKDOWN"
     EXCEPTION = "EXCEPTION"
 class ChildRecord(NamedTuple):
-    """The child record as a NamedTuple."""
+    """Child record as a NamedTuple."""
     type: ChildType
     kwargs: Dict[str, Any]
@@ -24,7 +24,7 @@ class ChildRecord(NamedTuple):
 class MutableExpander:
-    """A Streamlit expander that can be renamed and dynamically expanded/collapsed."""
+    """Streamlit expander that can be renamed and dynamically expanded/collapsed."""
     def __init__(self, parent_container: DeltaGenerator, label: str, expanded: bool):
         """Create a new MutableExpander.
@@ -51,7 +51,7 @@ class MutableExpander:
     @property
     def label(self) -> str:
-        """The expander's label string."""
+        """Expander's label string."""
         return self._label
     @property

@@ -40,7 +40,7 @@ class LLMThoughtState(Enum):
 class ToolRecord(NamedTuple):
-    """The tool record as a NamedTuple."""
+    """Tool record as a NamedTuple."""
     name: str
     input_str: str

@@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
 class BaseMessageConverter(ABC):
-    """Class that converts BaseMessage to the SQLAlchemy model."""
+    """Convert BaseMessage to the SQLAlchemy model."""
     @abstractmethod
     def from_sql_model(self, sql_message: Any) -> BaseMessage:

@@ -196,7 +196,7 @@ _message_type_lookups = {"human": "user", "ai": "assistant"}
 class BedrockChat(BaseChatModel, BedrockBase):
-    """A chat model that uses the Bedrock API."""
+    """Chat model that uses the Bedrock API."""
     @property
     def _llm_type(self) -> str:

@@ -26,8 +26,7 @@ DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful, and honest assistant."
 class ChatHuggingFace(BaseChatModel):
-    """
-    Wrapper for using Hugging Face LLM's as ChatModels.
+    """Hugging Face LLMs as ChatModels.
     Works with `HuggingFaceTextGenInference`, `HuggingFaceEndpoint`,
     and `HuggingFaceHub` LLMs.

@@ -161,7 +161,7 @@ def _convert_message_to_dict(message: BaseMessage) -> dict:
 class ChatLiteLLM(BaseChatModel):
-    """A chat model that uses the LiteLLM API."""
+    """Chat model that uses the LiteLLM API."""
     client: Any  #: :meta private:
     model: str = "gpt-3.5-turbo"

@@ -37,7 +37,7 @@ def _parse_chat_history(history: List[BaseMessage]) -> List:
 class MiniMaxChat(MinimaxCommon, BaseChatModel):
-    """Wrapper around Minimax large language models.
+    """MiniMax large language models.
     To use, you should have the environment variable ``MINIMAX_GROUP_ID`` and
     ``MINIMAX_API_KEY`` set with your API token, or pass it as a named parameter to

@@ -9,7 +9,7 @@ from langchain_community.llms.moonshot import MOONSHOT_SERVICE_URL_BASE, Moonsho
 class MoonshotChat(MoonshotCommon, ChatOpenAI):  # type: ignore[misc]
-    """Wrapper around Moonshot large language models.
+    """Moonshot large language models.
     To use, you should have the ``openai`` python package installed, and the
     environment variable ``MOONSHOT_API_KEY`` set with your API key.

@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
 class PaiEasChatEndpoint(BaseChatModel):
-    """Eas LLM Service chat model API.
+    """Alibaba Cloud PAI-EAS LLM Service chat model API.
     To use, must have a deployed eas chat llm service on AliCloud. One can set the
     environment variable ``eas_service_url`` and ``eas_service_token`` set with your eas

@@ -159,7 +159,7 @@ def _messages_to_prompt_dict(
 class ChatPremAI(BaseChatModel, BaseModel):
-    """Use any LLM provider with Prem and Langchain.
+    """PremAI Chat models.
     To use, you will need to have an API key. You can find your existing API Key
     or generate a new one here: https://app.premai.io/api_keys/
@@ -357,6 +357,7 @@ def create_prem_retry_decorator(
     max_retries: int = 1,
     run_manager: Optional[Union[CallbackManagerForLLMRun]] = None,
 ) -> Callable[[Any], Any]:
+    """Create a retry decorator for PremAI API errors."""
     import premai.models
     errors = [

@@ -10,7 +10,7 @@ from langchain_community.llms.solar import SOLAR_SERVICE_URL_BASE, SolarCommon
 class SolarChat(SolarCommon, ChatOpenAI):  # type: ignore[misc]
-    """Wrapper around Solar large language models.
+    """Solar large language models.
     To use, you should have the ``openai`` python package installed, and the
     environment variable ``SOLAR_API_KEY`` set with your API key.

@@ -89,7 +89,7 @@ def _convert_delta_to_message_chunk(
 class ChatSparkLLM(BaseChatModel):
-    """Wrapper around iFlyTek's Spark large language model.
+    """iFlyTek Spark large language model.
     To use, you should pass `app_id`, `api_key`, `api_secret`
     as a named parameter to the constructor OR set environment

@@ -61,6 +61,7 @@ logger = logging.getLogger(__name__)
 def convert_dict_to_message(
     _dict: Mapping[str, Any], is_chunk: bool = False
 ) -> Union[BaseMessage, BaseMessageChunk]:
+    """Convert a dict to a message."""
     role = _dict["role"]
     content = _dict["content"]
     if role == "user":
@@ -88,6 +89,7 @@ def convert_dict_to_message(
 def convert_message_chunk_to_message(message_chunk: BaseMessageChunk) -> BaseMessage:
+    """Convert a message chunk to a message."""
     if isinstance(message_chunk, HumanMessageChunk):
         return HumanMessage(content=message_chunk.content)
     elif isinstance(message_chunk, AIMessageChunk):

@@ -53,7 +53,7 @@ def _parse_chat_history(history: List[BaseMessage]) -> List[Dict[str, str]]:
 class ChatYandexGPT(_BaseYandexGPT, BaseChatModel):
-    """Wrapper around YandexGPT large language models.
+    """YandexGPT large language models.
     There are two authentication options for the service account
     with the ``ai.languageModels.user`` role:

@@ -42,6 +42,7 @@ ZHIPUAI_API_BASE = "https://open.bigmodel.cn/api/paas/v4/chat/completions"
 @contextmanager
 def connect_sse(client: Any, method: str, url: str, **kwargs: Any) -> Iterator:
+    """Connect to a server-sent event stream."""
     from httpx_sse import EventSource
     with client.stream(method, url, **kwargs) as response:
@@ -52,6 +53,7 @@ def connect_sse(client: Any, method: str, url: str, **kwargs: Any) -> Iterator:
 async def aconnect_sse(
     client: Any, method: str, url: str, **kwargs: Any
 ) -> AsyncIterator:
+    """Async connect to a server-sent event stream."""
     from httpx_sse import EventSource
     async with client.stream(method, url, **kwargs) as response:

@@ -6,7 +6,7 @@ from langchain_community.docstore.base import Docstore
 class DocstoreFn(Docstore):
-    """Langchain Docstore via arbitrary lookup function.
+    """Docstore via arbitrary lookup function.
     This is useful when:
     * it's expensive to construct an InMemoryDocstore/dict

@@ -9,7 +9,7 @@ from langchain_community.docstore.base import Docstore
 class Wikipedia(Docstore):
-    """Wrapper around wikipedia API."""
+    """Wikipedia API."""
     def __init__(self) -> None:
         """Check that wikipedia package is installed."""

@@ -9,6 +9,8 @@ from langchain_core.pydantic_v1 import Field
 class RerankRequest:
+    """Request for reranking."""
     def __init__(self, query: Any = None, passages: Any = None):
         self.query = query
         self.passages = passages if passages is not None else []

@@ -29,8 +29,7 @@ class TranscriptFormat(Enum):
 class AssemblyAIAudioTranscriptLoader(BaseLoader):
-    """
-    Loader for AssemblyAI audio transcripts.
+    """Load AssemblyAI audio transcripts.
     It uses the AssemblyAI API to transcribe audio files
     and loads the transcribed text into one or more Documents,
@@ -110,7 +109,7 @@ class AssemblyAIAudioTranscriptLoader(BaseLoader):
 class AssemblyAIAudioLoaderById(BaseLoader):
     """
-    Loader for AssemblyAI audio transcripts.
+    Load AssemblyAI audio transcripts.
     It uses the AssemblyAI API to get an existing transcription
     and loads the transcribed text into one or more Documents,

@@ -15,7 +15,7 @@ AV_PATTERN = re.compile(r"av[0-9]+")
 class BiliBiliLoader(BaseLoader):
     """
-    Loader for fetching transcripts from BiliBili videos.
+    Load fetching transcripts from BiliBili videos.
     """
     def __init__(

@@ -21,7 +21,9 @@ def _litm_reordering(documents: List[Document]) -> List[Document]:
 class LongContextReorder(BaseDocumentTransformer, BaseModel):
-    """Lost in the middle:
+    """Reorder long context.
+    Lost in the middle:
     Performance degrades when models must access relevant information
     in the middle of long contexts.
     See: https://arxiv.org/abs//2307.03172"""

@@ -9,7 +9,8 @@ from langchain_community.tools.nuclia.tool import NucliaUnderstandingAPI
 class NucliaTextTransformer(BaseDocumentTransformer):
-    """
+    """Nuclia Text Transformer.
     The Nuclia Understanding API splits into paragraphs and sentences,
     identifies entities, provides a summary of the text and generates
     embeddings for all sentences.

@@ -183,7 +183,7 @@ class AlephAlphaAsymmetricSemanticEmbedding(BaseModel, Embeddings):
 class AlephAlphaSymmetricSemanticEmbedding(AlephAlphaAsymmetricSemanticEmbedding):
-    """The symmetric version of the Aleph Alpha's semantic embeddings.
+    """Symmetric version of the Aleph Alpha's semantic embeddings.
     The main difference is that here, both the documents and
     queries are embedded with a SemanticRepresentation.Symmetric

@@ -12,7 +12,7 @@ def _chunk(texts: List[str], size: int) -> Iterator[List[str]]:
 class DatabricksEmbeddings(MlflowEmbeddings):
-    """Wrapper around embeddings LLMs in Databricks.
+    """Databricks embeddings.
     To use, you should have the ``mlflow`` python package installed.
     For more information, see https://mlflow.org/docs/latest/llms/deployments.

@@ -13,7 +13,9 @@ logger = getLogger(__name__)
 class InfinityEmbeddingsLocal(BaseModel, Embeddings):
-    """Optimized Embedding models https://github.com/michaelfeil/infinity
+    """Optimized Infinity embedding models.
+    https://github.com/michaelfeil/infinity
     This class deploys a local Infinity instance to embed text.
     The class requires async usage.

@@ -12,8 +12,7 @@ def _chunk(texts: List[str], size: int) -> Iterator[List[str]]:
 class JavelinAIGatewayEmbeddings(Embeddings, BaseModel):
-    """
-    Wrapper around embeddings LLMs in the Javelin AI Gateway.
+    """Javelin AI Gateway embeddings.
     To use, you should have the ``javelin_sdk`` python package installed.
     For more information, see https://docs.getjavelin.io

@@ -13,8 +13,7 @@ def _chunk(texts: List[str], size: int) -> Iterator[List[str]]:
 class MlflowAIGatewayEmbeddings(Embeddings, BaseModel):
-    """
-    Wrapper around embeddings LLMs in the MLflow AI Gateway.
+    """MLflow AI Gateway embeddings.
     To use, you should have the ``mlflow[gateway]`` python package installed.
     For more information, see https://mlflow.org/docs/latest/gateway/index.html.

@@ -42,6 +42,8 @@ def is_endpoint_live(url: str, headers: Optional[dict], payload: Any) -> bool:
 class NeMoEmbeddings(BaseModel, Embeddings):
+    """NeMo embedding models."""
     batch_size: int = 16
     model: str = "NV-Embed-QA-003"
     api_endpoint_url: str = "http://localhost:8088/v1/embeddings"

@@ -7,7 +7,7 @@ KG_TRIPLE_DELIMITER = "<|>"
 class KnowledgeTriple(NamedTuple):
-    """A triple in the graph."""
+    """Knowledge triple in the graph."""
     subject: str
     predicate: str

@@ -8,7 +8,7 @@ NAMESPACE_UUID = uuid.UUID(int=1984)
 class RecordManager(ABC):
-    """An abstract base class representing the interface for a record manager."""
+    """Abstract base class for a record manager."""
     def __init__(
         self,

@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
 class CloudflareWorkersAI(LLM):
-    """Langchain LLM class to help to access Cloudflare Workers AI service.
+    """Cloudflare Workers AI service.
     To use, you must provide an API token and
     account ID to access Cloudflare Workers AI, and

@@ -161,7 +161,7 @@ class _DatabricksClusterDriverProxyClient(_DatabricksClientBase):
 def get_repl_context() -> Any:
-    """Gets the notebook REPL context if running inside a Databricks notebook.
+    """Get the notebook REPL context if running inside a Databricks notebook.
     Returns None otherwise.
     """
     try:
@@ -175,7 +175,7 @@ def get_repl_context() -> Any:
 def get_default_host() -> str:
-    """Gets the default Databricks workspace hostname.
+    """Get the default Databricks workspace hostname.
     Raises an error if the hostname cannot be automatically determined.
     """
     host = os.getenv("DATABRICKS_HOST")
@@ -195,7 +195,7 @@ def get_default_host() -> str:
 def get_default_api_token() -> str:
-    """Gets the default Databricks personal access token.
+    """Get the default Databricks personal access token.
     Raises an error if the token cannot be automatically determined.
     """
     if api_token := os.getenv("DATABRICKS_TOKEN"):

@@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
 class EdenAI(LLM):
-    """Wrapper around edenai models.
+    """EdenAI models.
     To use, you should have
     the environment variable ``EDENAI_API_KEY`` set with your API token.

@@ -33,9 +33,7 @@ def _collect_user_input(
 class HumanInputLLM(LLM):
-    """
-    It returns user input as the response.
-    """
+    """User input as the response."""
     input_func: Callable = Field(default_factory=lambda: _collect_user_input)
     prompt_func: Callable[[str], None] = Field(default_factory=lambda: _display_prompt)

@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
 class IpexLLM(LLM):
-    """Wrapper around the IpexLLM model
+    """IpexLLM model.
     Example:
         .. code-block:: python

@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
 class Konko(LLM):
-    """Wrapper around Konko AI models.
+    """Konko AI models.
     To use, you'll need an API key. This can be passed in as init param
     ``konko_api_key`` or set as environment variable ``KONKO_API_KEY``.

@@ -9,6 +9,14 @@ logger = logging.getLogger(__name__)
 def default_guardrail_violation_handler(violation: dict) -> str:
+    """Default guardrail violation handler.
+    Args:
+        violation (dict): The violation dictionary.
+    Returns:
+        str: The canned response.
+    """
     if violation.get("canned_response"):
         return violation["canned_response"]
     guardrail_name = (
@@ -22,6 +30,8 @@ def default_guardrail_violation_handler(violation: dict) -> str:
 class LayerupSecurity(LLM):
+    """Layerup Security LLM service."""
     llm: LLM
     layerup_api_key: str
     layerup_api_base_url: str = "https://api.uselayerup.com/v1"

@@ -34,6 +34,7 @@ def load_llm_from_config(config: dict, **kwargs: Any) -> BaseLLM:
 def load_llm(file: Union[str, Path], **kwargs: Any) -> BaseLLM:
+    """Load LLM from a file."""
     # Convert file to Path object.
     if isinstance(file, str):
         file_path = Path(file)

@@ -9,7 +9,7 @@ from langchain_core.pydantic_v1 import Field, PrivateAttr
 class Mlflow(LLM):
-    """Wrapper around completions LLMs in MLflow.
+    """MLflow LLM service.
     To use, you should have the `mlflow[genai]` python package installed.
     For more information, see https://mlflow.org/docs/latest/llms/deployments.

@@ -21,8 +21,7 @@ class Params(BaseModel, extra=Extra.allow):  # type: ignore[call-arg]
 class MlflowAIGateway(LLM):
-    """
-    Wrapper around completions LLMs in the MLflow AI Gateway.
+    """MLflow AI Gateway LLMs.
     To use, you should have the ``mlflow[gateway]`` python package installed.
     For more information, see https://mlflow.org/docs/latest/gateway/index.html.

@@ -31,6 +31,8 @@ class _MoonshotClient(BaseModel):
 class MoonshotCommon(BaseModel):
+    """Common parameters for Moonshot LLMs."""
     _client: _MoonshotClient
     base_url: str = MOONSHOT_SERVICE_URL_BASE
     moonshot_api_key: Optional[SecretStr] = Field(default=None, alias="api_key")

@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
 class OpaquePrompts(LLM):
-    """An LLM wrapper that uses OpaquePrompts to sanitize prompts.
+    """LLM that uses OpaquePrompts to sanitize prompts.
     Wraps another LLM and sanitizes prompts before passing it to the LLM, then
     de-sanitizes the response.

@@ -124,7 +124,7 @@ class PromptLayerOpenAI(OpenAI):
 class PromptLayerOpenAIChat(OpenAIChat):
-    """Wrapper around OpenAI large language models.
+    """PromptLayer OpenAI large language models.
     To use, you should have the ``openai`` and ``promptlayer`` python
     package installed, and the environment variable ``OPENAI_API_KEY``

@@ -15,8 +15,7 @@ OUTPUT_TYPE = TypeVar("OUTPUT_TYPE", bound=Union[str, List[List[float]], Iterato
 class LineIterator:
-    """
-    A helper class for parsing the byte stream input.
+    """Parse the byte stream input.
     The output of the model will be in the following format:
@@ -74,7 +73,7 @@ class LineIterator:
 class ContentHandlerBase(Generic[INPUT_TYPE, OUTPUT_TYPE]):
-    """A handler class to transform input from LLM to a
+    """Handler class to transform input from LLM to a
     format that SageMaker endpoint expects.
     Similarly, the class handles transforming output from the

@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
 class SparkLLM(LLM):
-    """Wrapper around iFlyTek's Spark large language model.
+    """iFlyTek Spark large language model.
     To use, you should pass `app_id`, `api_key`, `api_secret`
     as a named parameter to the constructor OR set environment

@@ -10,7 +10,7 @@ from langchain_community.llms.utils import enforce_stop_tokens
 class TitanTakeoff(LLM):
-    """Wrapper around Titan Takeoff APIs."""
+    """Titan Takeoff API LLMs."""
     base_url: str = "http://localhost:8000"
     """Specifies the baseURL to use for the Titan Takeoff API.

@@ -40,12 +40,12 @@ stream_completion_with_retry = None
 def is_codey_model(model_name: str) -> bool:
-    """Returns True if the model name is a Codey model."""
+    """Return True if the model name is a Codey model."""
     return "code" in model_name
 def is_gemini_model(model_name: str) -> bool:
-    """Returns True if the model name is a Gemini model."""
+    """Return True if the model name is a Gemini model."""
     return model_name is not None and "gemini" in model_name
@@ -397,7 +397,7 @@ class VertexAI(_VertexAICommon, BaseLLM):
     alternative_import="langchain_google_vertexai.VertexAIModelGarden",
 )
 class VertexAIModelGarden(_VertexAIBase, BaseLLM):
-    """Large language models served from Vertex AI Model Garden."""
+    """Vertex AI Model Garden large language models."""
     client: "PredictionServiceClient" = None  #: :meta private:
     async_client: "PredictionServiceAsyncClient" = None  #: :meta private:

@@ -10,7 +10,7 @@ from langchain_community.utilities.arcee import ArceeWrapper, DALMFilter
 class ArceeRetriever(BaseRetriever):
-    """Retriever for Arcee's Domain Adapted Language Models (DALMs).
+    """Arcee Domain Adapted Language Models (DALMs) retriever.
     To use, set the ``ARCEE_API_KEY`` environment variable with your Arcee API key,
     or pass ``arcee_api_key`` as a named parameter.

@@ -11,7 +11,8 @@ from langchain_community.utilities import YouSearchAPIWrapper
 class YouRetriever(BaseRetriever, YouSearchAPIWrapper):
-    """`You` retriever that uses You.com's search API.
+    """You.com Search API retriever.
     It wraps results() to get_relevant_documents
     It uses all YouSearchAPIWrapper arguments without any change.
     """

@@ -8,8 +8,8 @@ from langchain_core.tools import BaseTool
 class CogniswitchKnowledgeRequest(BaseTool):
-    """
-    A tool for interacting with the Cogniswitch service to answer questions.
+    """Tool that uses the Cogniswitch service to answer questions.
     name: str = "cogniswitch_knowledge_request"
     description: str = (
         "A wrapper around cogniswitch service to answer the question
@@ -81,9 +81,9 @@ class CogniswitchKnowledgeRequest(BaseTool):
 class CogniswitchKnowledgeStatus(BaseTool):
-    """
-    A cogniswitch tool for interacting with the Cogniswitch services to know the
+    """Tool that uses the Cogniswitch services to get the
     status of the document or url uploaded.
     name: str = "cogniswitch_knowledge_status"
     description: str = (
         "A wrapper around cogniswitch services to know the status of
@@ -181,8 +181,8 @@ class CogniswitchKnowledgeStatus(BaseTool):
 class CogniswitchKnowledgeSourceFile(BaseTool):
-    """
-    A cogniswitch tool for interacting with the Cogniswitch services to store data.
+    """Tool that uses the Cogniswitch services to store data from file.
     name: str = "cogniswitch_knowledge_source_file"
     description: str = (
         "This calls the CogniSwitch services to analyze & store data from a file.
@@ -294,8 +294,8 @@ class CogniswitchKnowledgeSourceFile(BaseTool):
 class CogniswitchKnowledgeSourceURL(BaseTool):
-    """
-    A cogniswitch tool for interacting with the Cogniswitch services to store data.
+    """Tool that uses the Cogniswitch services to store data from a URL.
     name: str = "cogniswitch_knowledge_source_url"
     description: str = (
         "This calls the CogniSwitch services to analyze & store data from a url.

@@ -10,8 +10,8 @@ from langchain_community.tools.connery.tool import ConneryAction
 class ConneryService(BaseModel):
-    """
-    A service for interacting with the Connery Runner API.
+    """Service for interacting with the Connery Runner API.
     It gets the list of available actions from the Connery Runner,
     wraps them in ConneryAction Tools and returns them to the user.
     It also provides a method for running the actions.

@@ -13,9 +13,7 @@ from langchain_community.tools.connery.models import Action, Parameter
 class ConneryAction(BaseTool):
-    """
-    A LangChain Tool wrapping a Connery Action.
-    """
+    """Connery Action tool."""
     name: str
     description: str

@@ -54,7 +54,7 @@ class SearchEventsInput(BaseModel):
 class O365SearchEvents(O365BaseTool):
-    """Class for searching calendar events in Office 365
+    """Search calendar events in Office 365.
     Free, but setup is required
     """

@@ -53,9 +53,9 @@ class SearchEmailsInput(BaseModel):
 class O365SearchEmails(O365BaseTool):
-    """Class for searching email messages in Office 365
-    Free, but setup is required
+    """Search email messages in Office 365.
+    Free, but setup is required.
     """
     name: str = "messages_search"

@@ -32,7 +32,7 @@ class SendMessageSchema(BaseModel):
 class O365SendMessage(O365BaseTool):
-    """Tool for sending an email in Office 365."""
+    """Send an email in Office 365."""
     name: str = "send_email"
     description: str = (

@@ -36,7 +36,7 @@ def clean_body(body: str) -> str:
 def authenticate() -> Account:
-    """Authenticate using the Microsoft Grah API"""
+    """Authenticate using the Microsoft Graph API"""
     try:
         from O365 import Account
     except ImportError as e:

@@ -12,11 +12,13 @@ from langchain_community.utilities.you import YouSearchAPIWrapper
 class YouInput(BaseModel):
+    """Input schema for the you.com tool."""
     query: str = Field(description="should be a search query")
 class YouSearchTool(BaseTool):
-    """Tool that searches the you.com API"""
+    """Tool that searches the you.com API."""
     name = "you_search"
     description = (

@@ -82,8 +82,9 @@ from langchain_community.utilities.zapier import ZapierNLAWrapper
 class ZapierNLARunAction(BaseTool):
-    """
-    Args:
+    """Tool to run a specific action from the user's exposed actions.
+    Params:
     action_id: a specific action ID (from list actions) of the action to execute
         (the set api_key must be associated with the action owner)
     instructions: a natural language instruction string for using the action
@@ -167,11 +168,7 @@ ZapierNLARunAction.__doc__ = (
 class ZapierNLAListActions(BaseTool):
-    """
-    Args:
-    None
-    """
+    """Tool to list all exposed actions for the user."""
     name: str = "ZapierNLA_list_actions"
     description: str = BASE_ZAPIER_TOOL_PROMPT + (

@@ -14,6 +14,8 @@ if TYPE_CHECKING:
 class SetupMode(Enum):
+    """Setup mode for AstraDBEnvironment as enumerator."""
     SYNC = 1
     ASYNC = 2
     OFF = 3

@@ -62,7 +62,7 @@ logger = logging.getLogger(__name__)
 class Runtime(BaseModel):
-    """This class represents a Runtime.
+    """Pebblo Runtime.
     Args:
         type (Optional[str]): Runtime type. Defaults to ""
@@ -90,7 +90,7 @@ class Runtime(BaseModel):
 class Framework(BaseModel):
-    """This class represents a Framework instance.
+    """Pebblo Framework instance.
     Args:
         name (str): Name of the Framework.
@@ -102,7 +102,7 @@ class Framework(BaseModel):
 class App(BaseModel):
-    """This class represents an AI application.
+    """Pebblo AI application.
     Args:
         name (str): Name of the app.
@@ -124,7 +124,7 @@ class App(BaseModel):
 class Doc(BaseModel):
-    """This class represents a pebblo document.
+    """Pebblo document.
     Args:
         name (str): Name of app originating this document.
@@ -148,8 +148,8 @@ class Doc(BaseModel):
 def get_full_path(path: str) -> str:
-    """Return absolute local path for a local file/directory,
-    for network related path, return as is.
+    """Return an absolute local path for a local file/directory,
+    for a network related path, return as is.
     Args:
         path (str): Relative path to be resolved.
@@ -184,7 +184,7 @@ def get_loader_type(loader: str) -> str:
 def get_loader_full_path(loader: BaseLoader) -> str:
-    """Return absolute source path of source of loader based on the
+    """Return an absolute source path of source of loader based on the
     keys present in Document object from loader.
     Args:
@@ -266,7 +266,7 @@ def get_runtime() -> Tuple[Framework, Runtime]:
 def get_ip() -> str:
-    """Fetch local runtime ip address
+    """Fetch local runtime ip address.
     Returns:
         str: IP address

@@ -22,7 +22,7 @@ def create_retry_decorator(
         Union[AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun]
     ] = None,
 ) -> Callable[[Any], Any]:
-    """Creates a retry decorator for Vertex / Palm LLMs."""
+    """Create a retry decorator for Vertex / Palm LLMs."""
     import google.api_core
     errors = [
@@ -82,7 +82,7 @@ def init_vertexai(
 def get_client_info(module: Optional[str] = None) -> "ClientInfo":
-    r"""Returns a custom user agent header.
+    r"""Return a custom user agent header.
     Args:
         module (Optional[str]):
@@ -109,7 +109,7 @@ def get_client_info(module: Optional[str] = None) -> "ClientInfo":
 def load_image_from_gcs(path: str, project: Optional[str] = None) -> "Image":
-    """Loads im Image from GCS."""
+    """Load an image from Google Cloud Storage."""
     try:
         from google.cloud import storage
     except ImportError:

@@ -29,7 +29,7 @@ class YouHit(YouHitMetadata):
 class YouAPIOutput(BaseModel):
-    """The output from you.com api"""
+    """Output from you.com API."""
     hits: List[YouHit] = Field(
         description="A list of dictionaries containing the results"
@@ -37,7 +37,7 @@ class YouAPIOutput(BaseModel):
 class YouDocument(BaseModel):
-    """The output of parsing one snippet"""
+    """Output of parsing one snippet."""
     page_content: str = Field(description="One snippet of text")
     metadata: YouHitMetadata

@@ -28,7 +28,7 @@ def convert_pydantic_to_ernie_function(
     name: Optional[str] = None,
     description: Optional[str] = None,
 ) -> FunctionDescription:
-    """Converts a Pydantic model to a function description for the Ernie API."""
+    """Convert a Pydantic model to a function description for the Ernie API."""
     schema = dereference_refs(model.schema())
     schema.pop("definitions", None)
     return {
@@ -44,7 +44,7 @@ def convert_pydantic_to_ernie_tool(
     name: Optional[str] = None,
     description: Optional[str] = None,
 ) -> ToolDescription:
-    """Converts a Pydantic model to a function description for the Ernie API."""
+    """Convert a Pydantic model to a function description for the Ernie API."""
     function = convert_pydantic_to_ernie_function(
         model, name=name, description=description
     )

@@ -5,7 +5,7 @@ from typing import Any, Optional
 def get_client_info(module: Optional[str] = None) -> Any:
-    r"""Returns a custom user agent header.
+    r"""Return a custom user agent header.
     Args:
         module (Optional[str]):

@@ -139,7 +139,7 @@ def remove_lucene_chars(text: str) -> str:
 def dict_to_yaml_str(input_dict: Dict, indent: int = 0) -> str:
     """
-    Converts a dictionary to a YAML-like string without using external libraries.
+    Convert a dictionary to a YAML-like string without using external libraries.
     Parameters:
     - input_dict (dict): The dictionary to convert.
@@ -165,6 +165,8 @@ def dict_to_yaml_str(input_dict: Dict, indent: int = 0) -> str:
 def combine_queries(
     input_queries: List[Tuple[str, Dict[str, Any]]], operator: str
 ) -> Tuple[str, Dict[str, Any]]:
+    """Combine multiple queries with an operator."""
     # Initialize variables to hold the combined query and parameters
     combined_query: str = ""
     combined_params: Dict = {}
@@ -197,8 +199,7 @@ def combine_queries(
 def collect_params(
     input_data: List[Tuple[str, Dict[str, str]]],
 ) -> Tuple[List[str], Dict[str, Any]]:
-    """
-    Transform the input data into the desired format.
+    """Transform the input data into the desired format.
     Args:
     - input_data (list of tuples): Input data to transform.
@@ -324,6 +325,15 @@ def _handle_field_filter(
 def construct_metadata_filter(filter: Dict[str, Any]) -> Tuple[str, Dict]:
+    """Construct a metadata filter.
+    Args:
+        filter: A dictionary representing the filter condition.
+    Returns:
+        Tuple[str, Dict]
+    """
     if isinstance(filter, dict):
         if len(filter) == 1:
             # The only operators allowed at the top level are $AND and $OR

@@ -100,7 +100,7 @@ def check_operator_misuse(func: Callable) -> Callable:
 class RedisTag(RedisFilterField):
-    """A RedisFilterField representing a tag in a Redis index."""
+    """RedisFilterField representing a tag in a Redis index."""
     OPERATORS: Dict[RedisFilterOperator, str] = {
         RedisFilterOperator.EQ: "==",
@@ -192,7 +192,7 @@ class RedisTag(RedisFilterField):
 class RedisNum(RedisFilterField):
-    """A RedisFilterField representing a numeric field in a Redis index."""
+    """RedisFilterField representing a numeric field in a Redis index."""
     OPERATORS: Dict[RedisFilterOperator, str] = {
         RedisFilterOperator.EQ: "==",
@@ -311,7 +311,7 @@ class RedisNum(RedisFilterField):
 class RedisText(RedisFilterField):
-    """A RedisFilterField representing a text field in a Redis index."""
+    """RedisFilterField representing a text field in a Redis index."""
     OPERATORS: Dict[RedisFilterOperator, str] = {
         RedisFilterOperator.EQ: "==",
@@ -381,7 +381,7 @@ class RedisText(RedisFilterField):
 class RedisFilterExpression:
-    """A logical expression of RedisFilterFields.
+    """Logical expression of RedisFilterFields.
     RedisFilterExpressions can be combined using the & and | operators to create
     complex logical expressions that evaluate to the Redis Query language.

@@ -285,7 +285,7 @@ class RedisModel(BaseModel):
 def read_schema(
     index_schema: Optional[Union[Dict[str, List[Any]], str, os.PathLike]],
 ) -> Dict[str, Any]:
-    """Reads in the index schema from a dict or yaml file.
+    """Read in the index schema from a dict or yaml file.
     Check if it is a dict and return RedisModel otherwise, check if it's a path and
     read in the file assuming it's a yaml file and return a RedisModel

@@ -42,7 +42,7 @@ class BaseSerializer(ABC):
 class JsonSerializer(BaseSerializer):
-    """Serializes data in json using the json package from python standard library."""
+    """Serialize data in JSON using the json package from python standard library."""
     @classmethod
     def extension(cls) -> str:
@@ -58,7 +58,7 @@ class JsonSerializer(BaseSerializer):
 class BsonSerializer(BaseSerializer):
-    """Serializes data in binary json using the `bson` python package."""
+    """Serialize data in Binary JSON using the `bson` python package."""
     def __init__(self, persist_path: str) -> None:
         super().__init__(persist_path)
@@ -78,7 +78,7 @@ class BsonSerializer(BaseSerializer):
 class ParquetSerializer(BaseSerializer):
-    """Serializes data in `Apache Parquet` format using the `pyarrow` package."""
+    """Serialize data in `Apache Parquet` format using the `pyarrow` package."""
     def __init__(self, persist_path: str) -> None:
         super().__init__(persist_path)

@@ -24,7 +24,8 @@ logger = logging.getLogger(__name__)
 class SQLiteVSS(VectorStore):
-    """Wrapper around SQLite with vss extension as a vector database.
+    """SQLite with VSS extension as a vector database.
     To use, you should have the ``sqlite-vss`` python package installed.
     Example:
         .. code-block:: python

@@ -10,6 +10,8 @@ DEFAULT_TiDB_VECTOR_TABLE_NAME = "langchain_vector"
 class TiDBVectorStore(VectorStore):
+    """TiDB Vector Store."""
     def __init__(
         self,
         connection_string: str,

@@ -12,7 +12,7 @@ from langchain_community.vectorstores.utils import maximal_marginal_relevance
 class Vald(VectorStore):
-    """Wrapper around Vald vector database.
+    """Vald vector database.
     To use, you should have the ``vald-client-python`` python package installed.

@@ -77,8 +77,7 @@ def _len_check_if_sized(x: Any, y: Any, x_name: str, y_name: str) -> None:
 def VDMS_Client(host: str = "localhost", port: int = 55555) -> vdms.vdms:
-    """
-    Wrapper to initiate and connect a VDMS client to a VDMS server
+    """VDMS client for the VDMS server.
     Args:
         host: IP or hostname of VDMS server
@@ -98,7 +97,7 @@ def VDMS_Client(host: str = "localhost", port: int = 55555) -> vdms.vdms:
 class VDMS(VectorStore):
-    """Wrapper around Intel Lab's VDMS for vector-store workloads.
+    """Intel Lab's VDMS for vector-store workloads.
     To use, you should have both:
     - the ``vdms`` python package installed
@@ -1534,6 +1533,8 @@ def _check_descriptor_exists_by_id(
 def embedding2bytes(embedding: Union[List[float], None]) -> Union[bytes, None]:
+    """Convert embedding to bytes."""
     blob = None
     if embedding is not None:
         emb = np.array(embedding, dtype="float32")

@@ -18,7 +18,8 @@ logger = logging.getLogger(__name__)
 @dataclass
 class SummaryConfig:
-    """
+    """Configuration for summary generation.
     is_enabled: True if summary is enabled, False otherwise
     max_results: maximum number of results to summarize
     response_lang: requested language for the summary
@@ -34,7 +35,8 @@ class SummaryConfig:
 @dataclass
 class MMRConfig:
-    """
+    """Configuration for Maximal Marginal Relevance (MMR) search.
     is_enabled: True if MMR is enabled, False otherwise
     mmr_k: number of results to fetch for MMR, defaults to 50
     diversity_bias: number between 0 and 1 that determines the degree
@@ -53,7 +55,8 @@ class MMRConfig:
 @dataclass
 class VectaraQueryConfig:
-    """
+    """Configuration for Vectara query.
     k: Number of Documents to return. Defaults to 10.
     lambda_val: lexical match parameter for hybrid search.
     filter Dictionary of argument(s) to filter on metadata. For example a
@@ -566,7 +569,7 @@ class Vectara(VectorStore):
 class VectaraRetriever(VectorStoreRetriever):
-    """Retriever class for `Vectara`."""
+    """Retriever for `Vectara`."""
     vectorstore: Vectara
     """Vectara vectorstore."""

@@ -23,7 +23,8 @@ logger = logging.getLogger(__name__)
 class Yellowbrick(VectorStore):
-    """Wrapper around Yellowbrick as a vector database.
+    """Yellowbrick as a vector database.
     Example:
         .. code-block:: python
             from langchain_community.vectorstores import Yellowbrick