diff --git a/docs/docs/get_started/quickstart.mdx b/docs/docs/get_started/quickstart.mdx index 2f73bbd24e6..04ec3ee9321 100644 --- a/docs/docs/get_started/quickstart.mdx +++ b/docs/docs/get_started/quickstart.mdx @@ -134,7 +134,7 @@ We can then initialize the model: ```python from langchain_anthropic import ChatAnthropic -llm = ChatAnthropic(model="claude-2.1", temperature=0.2, max_tokens=1024) +llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024) ``` If you'd prefer not to set an environment variable you can pass the key in directly via the `anthropic_api_key` named parameter when initiating the Anthropic Chat Model class: diff --git a/docs/docs/modules/model_io/quick_start.mdx b/docs/docs/modules/model_io/quick_start.mdx index 872378234e6..7217c284d55 100644 --- a/docs/docs/modules/model_io/quick_start.mdx +++ b/docs/docs/modules/model_io/quick_start.mdx @@ -35,7 +35,7 @@ from langchain_openai import ChatOpenAI from langchain_openai import OpenAI llm = OpenAI() -chat_model = ChatOpenAI() +chat_model = ChatOpenAI(model="gpt-3.5-turbo-0125") ``` If you'd prefer not to set an environment variable you can pass the key in directly via the `openai_api_key` named parameter when initiating the OpenAI LLM class: @@ -84,7 +84,7 @@ We can then initialize the model: ```python from langchain_anthropic import ChatAnthropic -chat_model = ChatAnthropic(model="claude-2.1", temperature=0.2, max_tokens=1024) +chat_model = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024) ``` If you'd prefer not to set an environment variable you can pass the key in directly via the `anthropic_api_key` named parameter when initiating the Anthropic Chat Model class: diff --git a/docs/docs/use_cases/question_answering/quickstart.mdx b/docs/docs/use_cases/question_answering/quickstart.mdx index e216ba6c8a7..12f1e0e0bd7 100644 --- a/docs/docs/use_cases/question_answering/quickstart.mdx +++ 
b/docs/docs/use_cases/question_answering/quickstart.mdx @@ -480,7 +480,7 @@ import TabItem from '@theme/TabItem'; ```python from langchain_openai import ChatOpenAI -llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0) +llm = ChatOpenAI(model_name="gpt-3.5-turbo-0125", temperature=0) ``` @@ -494,7 +494,7 @@ llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0) ```python from langchain_anthropic import ChatAnthropic -llm = ChatAnthropic(model="claude-2.1", temperature=0, max_tokens=1024) +llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0, max_tokens=1024) ```