docs: quickstart models (#18511)
commit 1c1a3a7415
parent a727eec6ed
@@ -134,7 +134,7 @@ We can then initialize the model:
```python
from langchain_anthropic import ChatAnthropic

-llm = ChatAnthropic(model="claude-2.1", temperature=0.2, max_tokens=1024)
+llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024)
```

If you'd prefer not to set an environment variable you can pass the key in directly via the `anthropic_api_key` named parameter when initiating the Anthropic Chat Model class:
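For context, a minimal sketch of the direct-key option mentioned in that doc line (not part of this diff); the `"..."` string is only a placeholder for a real key:

```python
from langchain_anthropic import ChatAnthropic

# Pass the key explicitly instead of relying on the ANTHROPIC_API_KEY env var.
llm = ChatAnthropic(
    model="claude-3-sonnet-20240229",
    temperature=0.2,
    max_tokens=1024,
    anthropic_api_key="...",  # placeholder; supply your own key
)
```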
@@ -35,7 +35,7 @@ from langchain_openai import ChatOpenAI
from langchain_openai import OpenAI

llm = OpenAI()
-chat_model = ChatOpenAI()
+chat_model = ChatOpenAI(model="gpt-3.5-turbo-0125")
```

If you'd prefer not to set an environment variable you can pass the key in directly via the `openai_api_key` named parameter when initiating the OpenAI LLM class:
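Likewise, a short sketch of passing the key directly to the OpenAI wrappers referenced above; the `"..."` values are placeholders, not real keys:

```python
from langchain_openai import ChatOpenAI, OpenAI

# Both wrappers accept the key as a constructor argument instead of the
# OPENAI_API_KEY environment variable.
llm = OpenAI(openai_api_key="...")  # placeholder key
chat_model = ChatOpenAI(model="gpt-3.5-turbo-0125", openai_api_key="...")  # placeholder key
```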
@@ -84,7 +84,7 @@ We can then initialize the model:
```python
from langchain_anthropic import ChatAnthropic

-chat_model = ChatAnthropic(model="claude-2.1", temperature=0.2, max_tokens=1024)
+chat_model = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024)
```

If you'd prefer not to set an environment variable you can pass the key in directly via the `anthropic_api_key` named parameter when initiating the Anthropic Chat Model class:
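The same direct-key pattern applies to this page's `chat_model`; prompting for the key with `getpass` here is just one illustrative way to avoid hard-coding it, not something the diff itself does:

```python
from getpass import getpass

from langchain_anthropic import ChatAnthropic

# Prompt for the key at runtime rather than exporting ANTHROPIC_API_KEY.
anthropic_api_key = getpass("Anthropic API key: ")

chat_model = ChatAnthropic(
    model="claude-3-sonnet-20240229",
    temperature=0.2,
    max_tokens=1024,
    anthropic_api_key=anthropic_api_key,
)
```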
@@ -480,7 +480,7 @@ import TabItem from '@theme/TabItem';
```python
from langchain_openai import ChatOpenAI

-llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
+llm = ChatOpenAI(model_name="gpt-3.5-turbo-0125", temperature=0)
```

</TabItem>
@@ -494,7 +494,7 @@ llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
```python
from langchain_anthropic import ChatAnthropic

-llm = ChatAnthropic(model="claude-2.1", temperature=0, max_tokens=1024)
+llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.2, max_tokens=1024)
```

</TabItem>