Function calling logging fixup (#8153)

Fix the `functions` argument being overwritten in the invocation params.
Clean up key precedence in the merged params dict so per-call kwargs override defaults.
Clean up some incorrect return type annotations (`Mapping` should be `Dict`).
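
For context, the fix relies on the rule that later `**` expansions in a Python dict literal override earlier keys, so per-call kwargs must be merged last to survive. A minimal sketch of that rule (the values and the `get_weather` function spec are hypothetical, not taken from this PR):

```python
# Minimal sketch of dict-merge precedence (illustrative values only;
# the "get_weather" function spec is not from this PR).
defaults = {"model": "gpt-3.5-turbo", "temperature": 0.7}
call_kwargs = {"temperature": 0.0, "functions": [{"name": "get_weather"}]}

# Merging defaults last silently clobbers the caller's overrides:
broken = {**call_kwargs, **defaults}
assert broken["temperature"] == 0.7  # caller's 0.0 was lost

# Merging call kwargs last preserves them, including "functions":
fixed = {**defaults, **call_kwargs}
assert fixed["temperature"] == 0.0
assert fixed["functions"] == [{"name": "get_weather"}]
```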


Example:
https://dev.smith.langchain.com/public/9a7a6817-1679-49d8-8775-c13916975aae/r


![image](https://github.com/langchain-ai/langchain/assets/13333726/94cd0775-b6ef-40c3-9e5a-3ab65e466ab9)
William FH, 2023-07-23 18:01:33 -07:00 (committed by GitHub)
parent 961a0e200f
commit c5b50be225
11 changed files with 29 additions and 31 deletions

---

@@ -116,18 +116,18 @@ class AzureChatOpenAI(ChatOpenAI):
         }
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {**self._default_params}
 
     @property
     def _client_params(self) -> Dict[str, Any]:
         """Get the config params used for the openai client."""
-        openai_creds = {
+        return {
+            **super()._client_params,
             "api_type": self.openai_api_type,
             "api_version": self.openai_api_version,
         }
-        return {**super()._client_params, **openai_creds}
 
     @property
     def _llm_type(self) -> str:

---

@@ -3,7 +3,7 @@ import inspect
 import warnings
 from abc import ABC, abstractmethod
 from functools import partial
-from typing import Any, Dict, List, Mapping, Optional, Sequence
+from typing import Any, Dict, List, Optional, Sequence
 
 from pydantic import Field, root_validator
 
@@ -424,7 +424,7 @@ class BaseChatModel(BaseLanguageModel, ABC):
         return await self._call_async(messages, stop=_stop, **kwargs)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {}

---

@@ -1,5 +1,5 @@
 """Fake ChatModel for testing purposes."""
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import CallbackManagerForLLMRun
 from langchain.chat_models.base import SimpleChatModel
@@ -29,5 +29,5 @@ class FakeListChatModel(SimpleChatModel):
         return response
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {"responses": self.responses}

---

@@ -2,7 +2,7 @@
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
 
 from pydantic import BaseModel, root_validator
 from tenacity import (
@@ -40,8 +40,6 @@ logger = logging.getLogger(__name__)
 class ChatGooglePalmError(Exception):
     """Error raised when there is an issue with the Google PaLM API."""
 
-    pass
-
 
 def _truncate_at_stop_tokens(
     text: str,
@@ -323,7 +321,7 @@ class ChatGooglePalm(BaseChatModel, BaseModel):
         return _response_to_result(response, stop)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {
             "model_name": self.model_name,

---

@@ -2,7 +2,7 @@
 import asyncio
 from functools import partial
 from io import StringIO
-from typing import Any, Callable, List, Mapping, Optional
+from typing import Any, Callable, Dict, List, Mapping, Optional
 
 import yaml
 from pydantic import Field
@@ -76,7 +76,7 @@ class HumanInputChatModel(BaseChatModel):
     message_kwargs: Mapping[str, Any] = {}
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {
             "input_func": self.input_func.__name__,
             "message_func": self.message_func.__name__,

---

@@ -355,7 +355,7 @@ class ChatOpenAI(BaseChatModel):
     def _create_message_dicts(
         self, messages: List[BaseMessage], stop: Optional[List[str]]
     ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
-        params = dict(self._client_params)
+        params = self._client_params
         if stop is not None:
             if "stop" in params:
                 raise ValueError("`stop` found in both the input and default params.")
@@ -419,12 +419,12 @@ class ChatOpenAI(BaseChatModel):
         return self._create_chat_result(response)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {**{"model_name": self.model_name}, **self._default_params}
 
     @property
-    def _client_params(self) -> Mapping[str, Any]:
+    def _client_params(self) -> Dict[str, Any]:
         """Get the parameters used for the openai client."""
         openai_creds: Dict[str, Any] = {
             "api_key": self.openai_api_key,
@@ -436,17 +436,17 @@ class ChatOpenAI(BaseChatModel):
             import openai
 
             openai.proxy = {"http": self.openai_proxy, "https": self.openai_proxy} # type: ignore[assignment] # noqa: E501
-        return {**openai_creds, **self._default_params}
+        return {**self._default_params, **openai_creds}
 
     def _get_invocation_params(
         self, stop: Optional[List[str]] = None, **kwargs: Any
     ) -> Dict[str, Any]:
-        """Get the parameters used to invoke the model FOR THE CALLBACKS."""
+        """Get the parameters used to invoke the model."""
         return {
-            **super()._get_invocation_params(stop=stop, **kwargs),
-            **self._default_params,
             "model": self.model_name,
-            "function": kwargs.get("functions"),
+            **super()._get_invocation_params(stop=stop),
+            **self._default_params,
+            **kwargs,
         }
 
     @property

---

@@ -1,6 +1,6 @@
 """PromptLayer wrapper."""
 import datetime
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import (
     AsyncCallbackManagerForLLMRun,
@@ -121,7 +121,7 @@ class PromptLayerChatOpenAI(ChatOpenAI):
         return "promptlayer-openai-chat"
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {
             **super()._identifying_params,
             "pl_tags": self.pl_tags,

---

@@ -1,6 +1,6 @@
 """Unit tests for agents."""
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.agents import AgentExecutor, AgentType, initialize_agent
 from langchain.agents.tools import Tool
@@ -36,7 +36,7 @@ class FakeListLLM(LLM):
         return self._call(*args, **kwargs)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {}
 
     @property

---

@@ -1,6 +1,6 @@
 """Test functionality related to natbot."""
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import CallbackManagerForLLMRun
 from langchain.chains.natbot.base import NatBotChain
@@ -32,7 +32,7 @@ class FakeLLM(LLM):
         return len(text.split())
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {}

---

@@ -1,5 +1,5 @@
 """Fake Chat Model wrapper for testing purposes."""
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import (
     AsyncCallbackManagerForLLMRun,
@@ -39,5 +39,5 @@ class FakeChatModel(SimpleChatModel):
         return "fake-chat-model"
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {"key": "fake"}

---

@@ -1,5 +1,5 @@
 """Fake LLM wrapper for testing purposes."""
-from typing import Any, List, Mapping, Optional, cast
+from typing import Any, Dict, List, Mapping, Optional, cast
 
 from pydantic import validator
@@ -51,7 +51,7 @@ class FakeLLM(LLM):
         return "bar"
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {}
 
     @property