Adding Perplexity extra and deprecating the community version of ChatPerplexity (#30649)

Plus, some accompanying docs updates.

Some compelling usage:

```py
from langchain_perplexity import ChatPerplexity

chat = ChatPerplexity(model="llama-3.1-sonar-small-128k-online")
response = chat.invoke(
    "What were the most significant newsworthy events that occurred in the US recently?",
    extra_body={"search_recency_filter": "week"},
)
print(response.content)
# > Here are the top significant newsworthy events in the US recently: ...
```

Also, some confirmation of structured outputs:

```py
from langchain_perplexity import ChatPerplexity
from pydantic import BaseModel


class AnswerFormat(BaseModel):
    first_name: str
    last_name: str
    year_of_birth: int
    num_seasons_in_nba: int


messages = [
    {"role": "system", "content": "Be precise and concise."},
    {
        "role": "user",
        "content": (
            "Tell me about Michael Jordan. "
            "Please output a JSON object containing the following fields: "
            "first_name, last_name, year_of_birth, num_seasons_in_nba. "
        ),
    },
]

llm = ChatPerplexity(model="llama-3.1-sonar-small-128k-online")
structured_llm = llm.with_structured_output(AnswerFormat)
response = structured_llm.invoke(messages)
print(repr(response))
#> AnswerFormat(first_name='Michael', last_name='Jordan', year_of_birth=1963, num_seasons_in_nba=15)
```
This commit is contained in: parent `b8929e3d5f`, commit `af66ab098e`.
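The practical upshot for existing users is an import move. Below is a minimal migration sketch, assuming `langchain-perplexity` is installed (directly, or via the new `langchain[perplexity]` extra added in this diff) and `PPLX_API_KEY` is set; the model name mirrors the examples above:

```python
# Before (deprecated in langchain-community as of 0.3.21, slated for removal in 1.0):
# from langchain_community.chat_models import ChatPerplexity

# After: the dedicated partner package.
from langchain_perplexity import ChatPerplexity

# The class name and the constructor arguments shown in this PR are unchanged,
# so for most code this should be a drop-in import swap.
chat = ChatPerplexity(temperature=0, model="llama-3.1-sonar-small-128k-online")
print(chat.invoke("Briefly, what is Perplexity AI?").content)
```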
Docs notebook for the `ChatPerplexity` integration page:

````diff
@@ -17,12 +17,66 @@
 "source": [
 "# ChatPerplexity\n",
 "\n",
-"This notebook covers how to get started with `Perplexity` chat models."
+"\n",
+"This page will help you get started with Perplexity [chat models](../../concepts/chat_models.mdx). For detailed documentation of all `ChatPerplexity` features and configurations head to the [API reference](https://python.langchain.com/api_reference/perplexity/chat_models/langchain_perplexity.chat_models.ChatPerplexity.html).\n",
+"\n",
+"## Overview\n",
+"### Integration details\n",
+"\n",
+"| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/docs/integrations/chat/xai) | Package downloads | Package latest |\n",
+"| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n",
+"| [ChatPerplexity](https://python.langchain.com/api_reference/perplexity/chat_models/langchain_perplexity.chat_models.ChatPerplexity.html) | [langchain-perplexity](https://python.langchain.com/api_reference/perplexity/perplexity.html) | ❌ | beta | ❌ |  |  |\n",
+"\n",
+"### Model features\n",
+"| [Tool calling](../../how_to/tool_calling.ipynb) | [Structured output](../../how_to/structured_output.ipynb) | JSON mode | [Image input](../../how_to/multimodal_inputs.ipynb) | Audio input | Video input | [Token-level streaming](../../how_to/chat_streaming.ipynb) | Native async | [Token usage](../../how_to/chat_token_usage_tracking.ipynb) | [Logprobs](../../how_to/logprobs.ipynb) |\n",
+"| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n",
+"| ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ |\n",
+"\n",
+"## Setup\n",
+"\n",
+"To access Perplexity models you'll need to create a Perplexity account, get an API key, and install the `langchain-perplexity` integration package.\n",
+"\n",
+"### Credentials\n",
+"\n",
+"Head to [this page](https://www.perplexity.ai/) to sign up for Perplexity and generate an API key. Once you've done this set the `PPLX_API_KEY` environment variable:"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 1,
+"execution_count": null,
+"id": "2243f329",
+"metadata": {},
+"outputs": [],
+"source": [
+"import getpass\n",
+"import os\n",
+"\n",
+"if \"PPLX_API_KEY\" not in os.environ:\n",
+"    os.environ[\"PPLX_API_KEY\"] = getpass.getpass(\"Enter your Perplexity API key: \")"
+]
+},
+{
+"cell_type": "markdown",
+"id": "7dfe47c4",
+"metadata": {},
+"source": [
+"To enable automated tracing of your model calls, set your [LangSmith](https://docs.smith.langchain.com/) API key:"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "10a791fa",
+"metadata": {},
+"outputs": [],
+"source": [
+"# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")\n",
+"# os.environ[\"LANGSMITH_TRACING\"] = \"true\""
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
 "id": "d4a7c55d-b235-4ca4-a579-c90cc9570da9",
 "metadata": {
 "ExecuteTime": {
@@ -33,8 +87,8 @@
 },
 "outputs": [],
 "source": [
-"from langchain_community.chat_models import ChatPerplexity\n",
-"from langchain_core.prompts import ChatPromptTemplate"
+"from langchain_core.prompts import ChatPromptTemplate\n",
+"from langchain_perplexity import ChatPerplexity"
 ]
 },
 {
@@ -62,29 +116,9 @@
 "id": "97a8ce3a",
 "metadata": {},
 "source": [
 "The code provided assumes that your PPLX_API_KEY is set in your environment variables. If you would like to manually specify your API key and also choose a different model, you can use the following code:\n",
 "\n",
 "```python\n",
 "chat = ChatPerplexity(temperature=0, pplx_api_key=\"YOUR_API_KEY\", model=\"llama-3.1-sonar-small-128k-online\")\n",
 "```\n",
 "\n",
 "You can check a list of available models [here](https://docs.perplexity.ai/docs/model-cards). For reproducibility, we can set the API key dynamically by taking it as an input in this notebook."
 ]
 },
-{
-"cell_type": "code",
-"execution_count": 2,
-"id": "d3e49d78",
-"metadata": {},
-"outputs": [],
-"source": [
-"import os\n",
-"from getpass import getpass\n",
-"\n",
-"PPLX_API_KEY = getpass()\n",
-"os.environ[\"PPLX_API_KEY\"] = PPLX_API_KEY"
-]
-},
 {
 "cell_type": "code",
 "execution_count": 3,
@@ -305,7 +339,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python 3 (ipykernel)",
+"display_name": ".venv",
 "language": "python",
 "name": "python3"
 },
@@ -319,7 +353,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.10.4"
+"version": "3.11.11"
 }
 },
 "nbformat": 4,
````
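The rewritten notebook pairs `ChatPerplexity` with `ChatPromptTemplate`. Here is a short sketch of that combination (not taken verbatim from the notebook; it assumes `PPLX_API_KEY` is set as in the credentials cell above and reuses the model name from the commit message):

```python
import getpass
import os

from langchain_core.prompts import ChatPromptTemplate
from langchain_perplexity import ChatPerplexity

# Prompt for the key only if it is not already in the environment.
if "PPLX_API_KEY" not in os.environ:
    os.environ["PPLX_API_KEY"] = getpass.getpass("Enter your Perplexity API key: ")

# A simple prompt -> model chain.
prompt = ChatPromptTemplate.from_messages(
    [("system", "Be precise and concise."), ("human", "{question}")]
)
chat = ChatPerplexity(temperature=0, model="llama-3.1-sonar-small-128k-online")
chain = prompt | chat

response = chain.invoke({"question": "What is the capital of France?"})
print(response.content)
```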
Perplexity provider docs page:

`````diff
@@ -8,18 +8,18 @@

 ## Installation and Setup

-Install a Python package:
+Install the Perplexity x LangChain integration package:

 ```bash
-pip install openai
+pip install langchain-perplexity
 ````

 Get your API key from [here](https://docs.perplexity.ai/docs/getting-started).

 ## Chat models

-See a [usage example](/docs/integrations/chat/perplexity).
+See a variety of usage examples [here](/docs/integrations/chat/perplexity).

 ```python
-from langchain_community.chat_models import ChatPerplexity
+from langchain_perplexity import ChatPerplexity
 ```
`````
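The feature table in the notebook diff marks token-level streaming as supported. A hedged sketch of what that could look like through the standard chat-model `stream()` interface (assuming `PPLX_API_KEY` is set and reusing the same model name as above):

```python
from langchain_perplexity import ChatPerplexity

chat = ChatPerplexity(model="llama-3.1-sonar-small-128k-online")

# stream() yields message chunks as they arrive; concatenating their content
# reproduces the full answer.
for chunk in chat.stream("Summarize the history of the Eiffel Tower in one sentence."):
    print(chunk.content, end="", flush=True)
print()
```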
Community `ChatPerplexity` module (deprecation added):

````diff
@@ -18,6 +18,7 @@ from typing import (
     Union,
 )
 
+from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import LanguageModelInput
 from langchain_core.language_models.chat_models import (
@@ -71,6 +72,11 @@ def _create_usage_metadata(token_usage: dict) -> UsageMetadata:
     )
 
 
+@deprecated(
+    since="0.3.21",
+    removal="1.0",
+    alternative_import="langchain_perplexity.ChatPerplexity",
+)
 class ChatPerplexity(BaseChatModel):
     """`Perplexity AI` Chat models API.
 
````
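Rough illustration of what the new `@deprecated` decorator means for callers of the community class. This is a sketch under assumptions: it presumes a `langchain-community` release carrying this change plus `langchain-perplexity` installed, and the exact warning class, message, and trigger point come from `langchain_core`'s deprecation helper, so they may differ from what is printed here:

```python
import warnings

from langchain_community.chat_models import ChatPerplexity as CommunityChatPerplexity

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Instantiating the deprecated class should emit a deprecation warning that
    # points to langchain_perplexity.ChatPerplexity as the replacement.
    # (The placeholder key only satisfies the constructor; no network request
    # should be made at construction time.)
    _ = CommunityChatPerplexity(
        model="llama-3.1-sonar-small-128k-online", pplx_api_key="YOUR_API_KEY"
    )

for warning in caught:
    print(f"{warning.category.__name__}: {warning.message}")

# Preferred import going forward:
from langchain_perplexity import ChatPerplexity  # noqa: E402
```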
`langchain` optional extras (`pyproject.toml`):

````diff
@@ -38,6 +38,7 @@ groq = ["langchain-groq"]
 aws = ["langchain-aws"]
 deepseek = ["langchain-deepseek"]
 xai = ["langchain-xai"]
+perplexity = ["langchain-perplexity"]
 
 [project.urls]
 "Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain"
````
Lock file for the `langchain` package (`uv.lock`):

````diff
@@ -1,4 +1,5 @@
 version = 1
+revision = 1
 requires-python = ">=3.9, <4.0"
 resolution-markers = [
     "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -2400,6 +2401,9 @@ ollama = [
 openai = [
     { name = "langchain-openai" },
 ]
+perplexity = [
+    { name = "langchain-perplexity" },
+]
 together = [
     { name = "langchain-together" },
 ]
@@ -2493,6 +2497,7 @@ requires-dist = [
     { name = "langchain-mistralai", marker = "extra == 'mistralai'" },
     { name = "langchain-ollama", marker = "extra == 'ollama'" },
     { name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" },
+    { name = "langchain-perplexity", marker = "extra == 'perplexity'" },
     { name = "langchain-text-splitters", editable = "../text-splitters" },
     { name = "langchain-together", marker = "extra == 'together'" },
     { name = "langchain-xai", marker = "extra == 'xai'" },
@@ -2502,6 +2507,7 @@ requires-dist = [
     { name = "requests", specifier = ">=2,<3" },
     { name = "sqlalchemy", specifier = ">=1.4,<3" },
 ]
+provides-extras = ["community", "anthropic", "openai", "azure-ai", "cohere", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"]
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
@@ -2659,7 +2665,7 @@ wheels = [
 
 [[package]]
 name = "langchain-core"
-version = "0.3.49"
+version = "0.3.50"
 source = { editable = "../core" }
 dependencies = [
     { name = "jsonpatch" },
@@ -2836,7 +2842,7 @@ wheels = [
 
 [[package]]
 name = "langchain-openai"
-version = "0.3.11"
+version = "0.3.12"
 source = { editable = "../partners/openai" }
 dependencies = [
     { name = "langchain-core" },
@@ -2883,6 +2889,19 @@ typing = [
     { name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" },
 ]
 
+[[package]]
+name = "langchain-perplexity"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "langchain-core" },
+    { name = "openai" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/66/ccdce60458111a1118d8107c106c65fa40e93fe358c61f9cb2b87b9a5eca/langchain_perplexity-0.1.0.tar.gz", hash = "sha256:947648a223cf1cbbc5b26efcf29d5483443be222fd803da1cff9c2923d6063d6", size = 10069 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/31/82/d6bad72aed8dde4c3cdba5e51554f8978fc5d1ec43ba2f23611474673079/langchain_perplexity-0.1.0-py3-none-any.whl", hash = "sha256:7e18fa4c28e25240a9e57c613bf75737d2767d63dc92979de2db51f4e40127e0", size = 8669 },
+]
+
 [[package]]
 name = "langchain-tests"
 version = "0.3.17"
````
Partner package registry:

````diff
@@ -567,4 +567,7 @@ packages:
   repo: vectara/langchain-vectara
 - name: langchain-oxylabs
   path: .
   repo: oxylabs/langchain-oxylabs
+- name: langchain-perplexity
+  path: libs/partners/perplexity
+  repo: langchain-ai/langchain
````
Root `uv.lock`:

````diff
@@ -2184,6 +2184,7 @@ requires-dist = [
     { name = "langchain-mistralai", marker = "extra == 'mistralai'" },
     { name = "langchain-ollama", marker = "extra == 'ollama'" },
     { name = "langchain-openai", marker = "extra == 'openai'", editable = "libs/partners/openai" },
+    { name = "langchain-perplexity", marker = "extra == 'perplexity'" },
     { name = "langchain-text-splitters", editable = "libs/text-splitters" },
     { name = "langchain-together", marker = "extra == 'together'" },
     { name = "langchain-xai", marker = "extra == 'xai'" },
@@ -2193,7 +2194,7 @@ requires-dist = [
     { name = "requests", specifier = ">=2,<3" },
     { name = "sqlalchemy", specifier = ">=1.4,<3" },
 ]
-provides-extras = ["community", "anthropic", "openai", "azure-ai", "cohere", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai"]
+provides-extras = ["community", "anthropic", "openai", "azure-ai", "cohere", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"]
 
 [package.metadata.requires-dev]
 codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
````