docs: clean up init_chat_model (#26551)

@@ -98,48 +98,40 @@ def init_chat_model(

Must have the integration package corresponding to the model provider installed.

Args:
    model: The name of the model, e.g. "gpt-4o", "claude-3-opus-20240229".
    model_provider: The model provider. Supported model_provider values and the
        corresponding integration packages:

        - openai (langchain-openai)
        - anthropic (langchain-anthropic)
        - azure_openai (langchain-openai)
        - google_vertexai (langchain-google-vertexai)
        - google_genai (langchain-google-genai)
        - bedrock (langchain-aws)
        - cohere (langchain-cohere)
        - fireworks (langchain-fireworks)
        - together (langchain-together)
        - mistralai (langchain-mistralai)
        - huggingface (langchain-huggingface)
        - groq (langchain-groq)
        - ollama (langchain-ollama) [support added in langchain==0.2.12]

        Will attempt to infer model_provider from model if not specified. The
        following providers will be inferred based on these model prefixes:

        - gpt-3... or gpt-4... -> openai
        - claude... -> anthropic
        - amazon... -> bedrock
        - gemini... -> google_vertexai
        - command... -> cohere
        - accounts/fireworks... -> fireworks
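
        For example, a model whose name carries one of these prefixes can be
        initialized without naming the provider. A minimal sketch (assumes
        ``langchain-anthropic`` is installed; not part of the original docstring):

        .. code-block:: python

            from langchain.chat_models import init_chat_model

            # The "claude..." prefix means model_provider="anthropic" is inferred.
            claude = init_chat_model("claude-3-5-sonnet-20240620", temperature=0)
            claude.invoke("what's your name")
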
    configurable_fields: Which model parameters are configurable:

        - None: No configurable fields.
        - "any": All fields are configurable. *See Security Note below.*
        - Union[List[str], Tuple[str, ...]]: Specified fields are configurable.

        Fields are assumed to have config_prefix stripped if there is a
        config_prefix. If model is specified, then defaults to None. If model is
@@ -168,7 +160,9 @@ def init_chat_model(
    ValueError: If model_provider cannot be inferred or isn't supported.
    ImportError: If the model provider integration package is not installed.
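
    A rough sketch of the ValueError path (the model name here is deliberately
    unrecognizable and is not from the original docstring):

    .. code-block:: python

        from langchain.chat_models import init_chat_model

        try:
            # No known prefix and no model_provider, so inference fails.
            init_chat_model("some-unknown-model")
        except ValueError as err:
            print(err)
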
Initialize non-configurable models:

.. dropdown:: Init non-configurable model
    :open:

    .. code-block:: python

        # pip install langchain langchain-openai langchain-anthropic langchain-google-vertexai
@@ -183,7 +177,8 @@ def init_chat_model(
        gemini_15.invoke("what's your name")
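
    The example body elided between the hunks presumably resembles this sketch
    (model names and parameters are illustrative, not read from the diff):

    .. code-block:: python

        from langchain.chat_models import init_chat_model

        gpt_4o = init_chat_model("gpt-4o", model_provider="openai", temperature=0)
        claude_opus = init_chat_model(
            "claude-3-opus-20240229", model_provider="anthropic", temperature=0
        )
        gemini_15 = init_chat_model(
            "gemini-1.5-pro", model_provider="google_vertexai", temperature=0
        )

        gpt_4o.invoke("what's your name")
        claude_opus.invoke("what's your name")
        gemini_15.invoke("what's your name")
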
Create a partially configurable model with no default model:

.. dropdown:: Partially configurable model with no default

    .. code-block:: python

        # pip install langchain langchain-openai langchain-anthropic
@@ -204,7 +199,8 @@ def init_chat_model(
        )
        # claude-3.5 sonnet response
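
    A sketch of what the elided body likely does: initialize without a default
    model, then pick the model per call via config (specific names are
    assumptions, not read from the diff):

    .. code-block:: python

        from langchain.chat_models import init_chat_model

        # With no model given, "model" and "model_provider" become configurable.
        configurable_model = init_chat_model(temperature=0)

        configurable_model.invoke(
            "what's your name",
            config={"configurable": {"model": "gpt-4o"}},
        )
        # GPT-4o response

        configurable_model.invoke(
            "what's your name",
            config={"configurable": {"model": "claude-3-5-sonnet-20240620"}},
        )
        # claude-3.5 sonnet response
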
Create a fully configurable model with a default model and a config prefix:

.. dropdown:: Fully configurable model with a default

    .. code-block:: python

        # pip install langchain langchain-openai langchain-anthropic
@@ -233,7 +229,8 @@ def init_chat_model(
        )
        # Claude-3.5 sonnet response with temperature 0.6
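
    A sketch of the elided setup, assuming a config_prefix of "foo" (the prefix
    name and parameter values are illustrative):

    .. code-block:: python

        from langchain.chat_models import init_chat_model

        configurable_model_with_default = init_chat_model(
            "gpt-4o",
            model_provider="openai",
            configurable_fields="any",  # also lets temperature etc. be configured
            config_prefix="foo",
            temperature=0,
        )

        configurable_model_with_default.invoke(
            "what's your name",
            config={
                "configurable": {
                    "foo_model": "claude-3-5-sonnet-20240620",
                    "foo_model_provider": "anthropic",
                    "foo_temperature": 0.6,
                }
            },
        )
        # Claude-3.5 sonnet response with temperature 0.6
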
Bind tools to a configurable model:

.. dropdown:: Bind tools to a configurable model

    You can call any ChatModel declarative methods on a configurable model in the
    same way that you would with a normal model.
@@ -270,6 +267,18 @@ def init_chat_model(
            config={"configurable": {"model": "claude-3-5-sonnet-20240620"}}
        )
        # Claude-3.5 sonnet response with tools
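
    A sketch of the elided portion: tool schemas are bound with ``bind_tools``
    and the bound model is invoked with and without a runtime model override
    (the tool definition is illustrative):

    .. code-block:: python

        from pydantic import BaseModel, Field

        from langchain.chat_models import init_chat_model

        class GetWeather(BaseModel):
            """Get the current weather in a given location."""

            location: str = Field(..., description="City and state, e.g. San Francisco, CA")

        configurable_model = init_chat_model(
            "gpt-4o", configurable_fields=("model", "model_provider"), temperature=0
        )
        model_with_tools = configurable_model.bind_tools([GetWeather])

        model_with_tools.invoke("What's the weather in San Francisco?")
        # GPT-4o response with tools

        model_with_tools.invoke(
            "What's the weather in San Francisco?",
            config={"configurable": {"model": "claude-3-5-sonnet-20240620"}},
        )
        # Claude-3.5 sonnet response with tools
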
.. versionadded:: 0.2.7

.. versionchanged:: 0.2.8

    Support for ``configurable_fields`` and ``config_prefix`` added.

.. versionchanged:: 0.2.12

    Support for Ollama via the ``langchain-ollama`` package added. Previously,
    the langchain-community version of Ollama (now deprecated) was installed
    by default.

""" # noqa: E501
if not model and not configurable_fields:
    # With neither a model nor explicit configurable_fields, default to making
    # the model and its provider runtime-configurable.
    configurable_fields = ("model", "model_provider")