From 1c1a3a7415576b7f5917859b088f298b9098ad18 Mon Sep 17 00:00:00 2001
From: Bagatur <22008038+baskaryan@users.noreply.github.com>
Date: Mon, 4 Mar 2024 08:33:19 -0800
Subject: [PATCH] docs: quickstart models (#18511)

---
 docs/docs/get_started/quickstart.mdx                  | 2 +-
 docs/docs/modules/model_io/quick_start.mdx            | 4 ++--
 docs/docs/use_cases/question_answering/quickstart.mdx | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/docs/get_started/quickstart.mdx b/docs/docs/get_started/quickstart.mdx
index 2f73bbd24e6..04ec3ee9321 100644
--- a/docs/docs/get_started/quickstart.mdx
+++ b/docs/docs/get_started/quickstart.mdx
@@ -134,7 +134,7 @@ We can then initialize the model:
 ```python
 from langchain_anthropic import ChatAnthropic
 
-llm = ChatAnthropic(model="claude-2.1", temperature=0.2, max_tokens=1024)
+llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024)
 ```
 
 If you'd prefer not to set an environment variable you can pass the key in directly via the `anthropic_api_key` named parameter when initiating the Anthropic Chat Model class:
diff --git a/docs/docs/modules/model_io/quick_start.mdx b/docs/docs/modules/model_io/quick_start.mdx
index 872378234e6..7217c284d55 100644
--- a/docs/docs/modules/model_io/quick_start.mdx
+++ b/docs/docs/modules/model_io/quick_start.mdx
@@ -35,7 +35,7 @@ from langchain_openai import ChatOpenAI
 from langchain_openai import OpenAI
 
 llm = OpenAI()
-chat_model = ChatOpenAI()
+chat_model = ChatOpenAI(model="gpt-3.5-turbo-0125")
 ```
 
 If you'd prefer not to set an environment variable you can pass the key in directly via the `openai_api_key` named parameter when initiating the OpenAI LLM class:
@@ -84,7 +84,7 @@ We can then initialize the model:
 ```python
 from langchain_anthropic import ChatAnthropic
 
-chat_model = ChatAnthropic(model="claude-2.1", temperature=0.2, max_tokens=1024)
+chat_model = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024)
 ```
 
 If you'd prefer not to set an environment variable you can pass the key in directly via the `anthropic_api_key` named parameter when initiating the Anthropic Chat Model class:
diff --git a/docs/docs/use_cases/question_answering/quickstart.mdx b/docs/docs/use_cases/question_answering/quickstart.mdx
index e216ba6c8a7..12f1e0e0bd7 100644
--- a/docs/docs/use_cases/question_answering/quickstart.mdx
+++ b/docs/docs/use_cases/question_answering/quickstart.mdx
@@ -480,7 +480,7 @@ import TabItem from '@theme/TabItem';
 ```python
 from langchain_openai import ChatOpenAI
 
-llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
+llm = ChatOpenAI(model_name="gpt-3.5-turbo-0125", temperature=0)
 ```
 
 
@@ -494,7 +494,7 @@ llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
 ```python
 from langchain_anthropic import ChatAnthropic
 
-llm = ChatAnthropic(model="claude-2.1", temperature=0, max_tokens=1024)
+llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024)
 ```
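
For anyone trying the updated snippets locally, here is a minimal sketch that combines the initializations this patch settles on; it assumes `langchain-openai` and `langchain-anthropic` are installed and that the relevant API keys are set in the environment (or passed via the `openai_api_key` / `anthropic_api_key` parameters the docs mention). Model names and parameters are taken directly from the hunks above.

```python
# Minimal sketch of the model initializations the updated docs use.
# Assumes `langchain-openai` and `langchain-anthropic` are installed and
# OPENAI_API_KEY / ANTHROPIC_API_KEY are set in the environment.
from langchain_anthropic import ChatAnthropic
from langchain_openai import ChatOpenAI

# Pinned model versions, matching the + lines in the patch.
chat_openai = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
chat_anthropic = ChatAnthropic(
    model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024
)

# The docs also note that keys can be passed directly instead of via env vars
# (placeholder keys shown, not real values):
# chat_openai = ChatOpenAI(model="gpt-3.5-turbo-0125", openai_api_key="sk-...")
# chat_anthropic = ChatAnthropic(
#     model="claude-3-sonnet-20240229", anthropic_api_key="sk-ant-..."
# )

# Both chat models expose the same runnable interface:
print(chat_openai.invoke("Say hello in one word.").content)
print(chat_anthropic.invoke("Say hello in one word.").content)
```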