@@ -72,7 +72,7 @@ See [supported integrations](/docs/integrations/chat/) for details on getting st
 
 ### Example selectors
 
-[Example Selectors](/docs/concepts/example_selectors) are responsible for selecting the correct few shot examples to pass to the prompt.
+[Example Selectors](/docs/concepts/example_selectors) are responsible for selecting the correct few-shot examples to pass to the prompt.
 
 - [How to: use example selectors](/docs/how_to/example_selectors)
 - [How to: select examples by length](/docs/how_to/example_selectors_length_based)
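
For context, the length-based selector linked above is typically wired into a `FewShotPromptTemplate`. A minimal sketch, not part of this diff; the antonym example data is made up:

```python
from langchain_core.example_selectors import LengthBasedExampleSelector
from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate

# Made-up few-shot examples for illustration.
examples = [
    {"input": "happy", "output": "sad"},
    {"input": "tall", "output": "short"},
    {"input": "energetic", "output": "lethargic"},
]
example_prompt = PromptTemplate.from_template("Input: {input}\nOutput: {output}")

# Selects as many examples as fit under the length budget.
selector = LengthBasedExampleSelector(
    examples=examples,
    example_prompt=example_prompt,
    max_length=25,
)
prompt = FewShotPromptTemplate(
    example_selector=selector,
    example_prompt=example_prompt,
    prefix="Give the antonym of every input.",
    suffix="Input: {adjective}\nOutput:",
    input_variables=["adjective"],
)
print(prompt.format(adjective="big"))
```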
@@ -168,7 +168,7 @@ See [supported integrations](/docs/integrations/vectorstores/) for details on ge
 
 Indexing is the process of keeping your vectorstore in-sync with the underlying data source.
 
-- [How to: reindex data to keep your vectorstore in sync with the underlying data source](/docs/how_to/indexing)
+- [How to: reindex data to keep your vectorstore in-sync with the underlying data source](/docs/how_to/indexing)
 
 ### Tools
 
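
The reindexing how-to linked above pairs a record manager with a vector store so repeated runs add, skip, or delete documents as the source changes. A sketch under assumed components (fake embeddings, a local SQLite record manager), not taken from this commit:

```python
from langchain.indexes import SQLRecordManager, index
from langchain_core.documents import Document
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.vectorstores import InMemoryVectorStore

vector_store = InMemoryVectorStore(DeterministicFakeEmbedding(size=256))
record_manager = SQLRecordManager("demo_index", db_url="sqlite:///record_manager_cache.sql")
record_manager.create_schema()

docs = [
    Document(page_content="kitty", metadata={"source": "kitty.txt"}),
    Document(page_content="doggy", metadata={"source": "doggy.txt"}),
]

# Re-running this after the source documents change skips unchanged content
# and removes stale entries, keeping the vector store in-sync with the source.
result = index(docs, record_manager, vector_store, cleanup="incremental", source_id_key="source")
print(result)  # counts of added / updated / skipped / deleted documents
```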
@@ -508,7 +508,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
         **kwargs: Any,
     ) -> Iterator[AIMessageChunk]:
         if not self._should_stream(async_api=False, **{**kwargs, "stream": True}):
-            # model doesn't implement streaming, so use default implementation
+            # Model doesn't implement streaming, so use default implementation
             yield cast(
                 "AIMessageChunk",
                 self.invoke(input, config=config, stop=stop, **kwargs),
@@ -1284,6 +1284,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
         run_manager: Optional[CallbackManagerForLLMRun] = None,
         **kwargs: Any,
     ) -> Iterator[ChatGenerationChunk]:
+        # We expect that subclasses implement this method if they support streaming.
         raise NotImplementedError
 
     async def _astream(
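
The comment added above records that `_stream` is optional; models that skip it fall through to the single-chunk `invoke` path in the earlier hunk. A minimal sketch of a subclass that implements both hooks (the `ParrotChatModel` name and its echo behavior are invented for illustration, not part of this diff):

```python
from collections.abc import Iterator
from typing import Any, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult


class ParrotChatModel(BaseChatModel):
    """Toy chat model that repeats the last message back."""

    @property
    def _llm_type(self) -> str:
        return "parrot-chat-model"

    def _generate(
        self,
        messages: list[BaseMessage],
        stop: Optional[list[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        text = str(messages[-1].content)
        return ChatResult(generations=[ChatGeneration(message=AIMessage(content=text))])

    def _stream(
        self,
        messages: list[BaseMessage],
        stop: Optional[list[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        # Emit the reply one character at a time as generation chunks.
        for char in str(messages[-1].content):
            yield ChatGenerationChunk(message=AIMessageChunk(content=char))


model = ParrotChatModel()
print(model.invoke("hello").content)             # "hello"
print([c.content for c in model.stream("hi")])   # ["h", "i"]
```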
@@ -39,7 +39,6 @@ class InputTokenDetails(TypedDict, total=False):
     Does *not* need to sum to full input token count. Does *not* need to have all keys.
 
     Example:
-
         .. code-block:: python
 
             {
@@ -75,7 +74,6 @@ class OutputTokenDetails(TypedDict, total=False):
     Does *not* need to sum to full output token count. Does *not* need to have all keys.
 
     Example:
-
         .. code-block:: python
 
             {
@@ -103,7 +101,6 @@ class UsageMetadata(TypedDict):
     This is a standard representation of token usage that is consistent across models.
 
     Example:
-
         .. code-block:: python
 
             {
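
The three docstring tweaks above all touch the token-accounting TypedDicts (`InputTokenDetails`, `OutputTokenDetails`, `UsageMetadata`). For context, a sketch of the shape they describe attached to an `AIMessage`; the counts are made-up illustrative values:

```python
from langchain_core.messages import AIMessage
from langchain_core.messages.ai import UsageMetadata

usage: UsageMetadata = {
    "input_tokens": 350,
    "output_tokens": 240,
    "total_tokens": 590,
    "input_token_details": {"audio": 10, "cache_creation": 200, "cache_read": 100},
    "output_token_details": {"audio": 10, "reasoning": 200},
}

msg = AIMessage(content="Hello!", usage_metadata=usage)
print(msg.usage_metadata["total_tokens"])  # 590
```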
@@ -15,14 +15,14 @@ from langchain_core.utils._merge import merge_dicts
 class ChatGeneration(Generation):
     """A single chat generation output.
 
-    A subclass of Generation that represents the response from a chat model
+    A subclass of ``Generation`` that represents the response from a chat model
     that generates chat messages.
 
-    The `message` attribute is a structured representation of the chat message.
-    Most of the time, the message will be of type `AIMessage`.
+    The ``message`` attribute is a structured representation of the chat message.
+    Most of the time, the message will be of type ``AIMessage``.
 
     Users working with chat models will usually access information via either
-    `AIMessage` (returned from runnable interfaces) or `LLMResult` (available
+    ``AIMessage`` (returned from runnable interfaces) or ``LLMResult`` (available
     via callbacks).
     """
 
@@ -31,6 +31,7 @@ class ChatGeneration(Generation):
 
     .. warning::
         SHOULD NOT BE SET DIRECTLY!
+
     """
     message: BaseMessage
     """The message output by the chat model."""
@@ -50,6 +51,7 @@ class ChatGeneration(Generation):
 
         Raises:
             ValueError: If the message is not a string or a list.
+
         """
         text = ""
         if isinstance(self.message.content, str):
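
For context on the `message`/`text` relationship these docstring edits describe, a small sketch (not from the commit): `text` is derived from the message content by the validator, which is why it should not be set directly.

```python
from langchain_core.messages import AIMessage
from langchain_core.outputs import ChatGeneration

gen = ChatGeneration(message=AIMessage(content="Hello, world!"))
print(gen.message.content)  # "Hello, world!"
print(gen.text)             # "Hello, world!" (populated from the message)
```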
@@ -69,9 +71,9 @@ class ChatGeneration(Generation):
 
 
 class ChatGenerationChunk(ChatGeneration):
-    """ChatGeneration chunk.
+    """``ChatGeneration`` chunk.
 
-    ChatGeneration chunks can be concatenated with other ChatGeneration chunks.
+    ``ChatGeneration`` chunks can be concatenated with other ``ChatGeneration`` chunks.
     """
 
     message: BaseMessageChunk
@@ -83,11 +85,11 @@ class ChatGenerationChunk(ChatGeneration):
     def __add__(
         self, other: Union[ChatGenerationChunk, list[ChatGenerationChunk]]
     ) -> ChatGenerationChunk:
-        """Concatenate two ChatGenerationChunks.
+        """Concatenate two ``ChatGenerationChunks``.
 
         Args:
-            other: The other ChatGenerationChunk or list of ChatGenerationChunks to
-                concatenate.
+            other: The other ``ChatGenerationChunk`` or list of ``ChatGenerationChunk``s
+                to concatenate.
         """
         if isinstance(other, ChatGenerationChunk):
             generation_info = merge_dicts(
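
A quick sketch of the concatenation described above (contents made up, not part of this diff):

```python
from langchain_core.messages import AIMessageChunk
from langchain_core.outputs import ChatGenerationChunk

left = ChatGenerationChunk(message=AIMessageChunk(content="Hello, "))
right = ChatGenerationChunk(message=AIMessageChunk(content="world!"))
combined = left + right
print(combined.text)  # "Hello, world!"
```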
@@ -116,7 +118,7 @@ class ChatGenerationChunk(ChatGeneration):
 def merge_chat_generation_chunks(
     chunks: list[ChatGenerationChunk],
 ) -> Union[ChatGenerationChunk, None]:
-    """Merge a list of ChatGenerationChunks into a single ChatGenerationChunk."""
+    """Merge list of ``ChatGenerationChunk``s into a single ``ChatGenerationChunk``."""
     if not chunks:
         return None
 
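
And the helper above merges an accumulated list of chunks, returning `None` for an empty list. A sketch, importing from the module shown in this hunk; the chunk contents are made up:

```python
from langchain_core.messages import AIMessageChunk
from langchain_core.outputs.chat_generation import (
    ChatGenerationChunk,
    merge_chat_generation_chunks,
)

chunks = [ChatGenerationChunk(message=AIMessageChunk(content=part)) for part in ["Hel", "lo", "!"]]
merged = merge_chat_generation_chunks(chunks)
print(merged.text)                       # "Hello!"
print(merge_chat_generation_chunks([]))  # None
```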
@@ -107,8 +107,12 @@ class ImageURL(TypedDict, total=False):
     """Image URL."""
 
     detail: Literal["auto", "low", "high"]
-    """Specifies the detail level of the image. Defaults to "auto".
-    Can be "auto", "low", or "high"."""
+    """Specifies the detail level of the image. Defaults to ``'auto'``.
+    Can be ``'auto'``, ``'low'``, or ``'high'``.
+
+    This follows OpenAI's Chat Completion API's image URL format.
+
+    """
 
     url: str
     """Either a URL of the image or the base64 encoded image data."""
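
The expanded docstring points at OpenAI's Chat Completions image-URL format; for context, a sketch of that content block on a `HumanMessage` (the URL is a placeholder, not from the commit):

```python
from langchain_core.messages import HumanMessage

message = HumanMessage(
    content=[
        {"type": "text", "text": "Describe this image."},
        {
            "type": "image_url",
            "image_url": {"url": "https://example.com/cat.png", "detail": "low"},
        },
    ]
)
```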