mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-03 12:07:36 +00:00
feat(perplexity): expose search_results
in chat model (#31468)
Description: The Perplexity chat model already returns a `search_results` field, but LangChain dropped it when mapping Perplexity responses to `additional_kwargs`. This patch adds `"search_results"` to the allowed attribute lists in both `_stream` and `_generate`, so downstream code can access it just like `images`, `citations`, or `related_questions`. Dependencies: none — the change is purely internal; no new imports or optional dependencies are required. See https://community.perplexity.ai/t/new-feature-search-results-field-with-richer-metadata/398 --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: Mason Daugherty <github@mdrxy.com>
This commit is contained in:
@@ -74,7 +74,7 @@ class ChatPerplexity(BaseChatModel):
     Key init args - completion params:
         model: str
-            Name of the model to use. e.g. "llama-3.1-sonar-small-128k-online"
+            Name of the model to use. e.g. "sonar"
         temperature: float
             Sampling temperature to use. Default is 0.7
         max_tokens: Optional[int]
@@ -95,11 +95,9 @@ class ChatPerplexity(BaseChatModel):
     Instantiate:
         .. code-block:: python

-            from langchain_community.chat_models import ChatPerplexity
+            from langchain_perplexity import ChatPerplexity

-            llm = ChatPerplexity(
-                model="llama-3.1-sonar-small-128k-online", temperature=0.7
-            )
+            llm = ChatPerplexity(model="sonar", temperature=0.7)

     Invoke:
         .. code-block:: python
@@ -147,7 +145,7 @@ class ChatPerplexity(BaseChatModel):
     """ # noqa: E501

     client: Any = None #: :meta private:
-    model: str = "llama-3.1-sonar-small-128k-online"
+    model: str = "sonar"
     """Model name."""
     temperature: float = 0.7
     """What sampling temperature to use."""
@@ -325,7 +323,7 @@ class ChatPerplexity(BaseChatModel):
             additional_kwargs = {}
             if first_chunk:
                 additional_kwargs["citations"] = chunk.get("citations", [])
-                for attr in ["images", "related_questions"]:
+                for attr in ["images", "related_questions", "search_results"]:
                     if attr in chunk:
                         additional_kwargs[attr] = chunk[attr]

@@ -376,7 +374,7 @@ class ChatPerplexity(BaseChatModel):
         usage_metadata = None

        additional_kwargs = {}
-        for attr in ["citations", "images", "related_questions"]:
+        for attr in ["citations", "images", "related_questions", "search_results"]:
            if hasattr(response, attr):
                additional_kwargs[attr] = getattr(response, attr)

Reference in New Issue
Block a user