Mirror of https://github.com/hwchase17/langchain.git (synced 2026-02-21 14:43:07 +00:00)
fix(core): accept int temperature in _get_ls_params for LangSmith tracing (#35302)
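Background on the underlying type check, as a quick illustration rather than part of the patch: a Python int is not an instance of float, so the old isinstance(..., float) guard silently skipped integer temperatures such as 0 or 1 and they never reached the LangSmith run params.

# Why the old check dropped integer temperatures: a Python int is not a float.
isinstance(0.2, float)        # True  -> ls_temperature was recorded
isinstance(0, float)          # False -> ls_temperature was silently omitted
isinstance(0, (int, float))   # True  -> recorded with the widened check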
@@ -812,9 +812,11 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
             ls_params["ls_model_name"] = self.model_name

         # temperature
-        if "temperature" in kwargs and isinstance(kwargs["temperature"], float):
+        if "temperature" in kwargs and isinstance(kwargs["temperature"], (int, float)):
             ls_params["ls_temperature"] = kwargs["temperature"]
-        elif hasattr(self, "temperature") and isinstance(self.temperature, float):
+        elif hasattr(self, "temperature") and isinstance(
+            self.temperature, (int, float)
+        ):
             ls_params["ls_temperature"] = self.temperature

         # max_tokens
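For reference, a self-contained sketch of the patched temperature branch, with plain dicts standing in for the method's kwargs and return value and a SimpleNamespace standing in for the model's temperature attribute; collect_temperature is a hypothetical helper name, not part of the library.

# Standalone sketch of the widened temperature handling (assumed names, not library code).
from types import SimpleNamespace

def collect_temperature(model, kwargs: dict, ls_params: dict) -> dict:
    # Same logic as the patched branch, with `model` standing in for `self`.
    if "temperature" in kwargs and isinstance(kwargs["temperature"], (int, float)):
        ls_params["ls_temperature"] = kwargs["temperature"]
    elif hasattr(model, "temperature") and isinstance(
        model.temperature, (int, float)
    ):
        ls_params["ls_temperature"] = model.temperature
    return ls_params

model = SimpleNamespace(temperature=0)                       # integer attribute, e.g. temperature=0
print(collect_temperature(model, {}, {}))                    # {'ls_temperature': 0} with the new check
print(collect_temperature(model, {"temperature": 1}, {}))    # explicit kwarg wins: {'ls_temperature': 1}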
@@ -351,9 +351,11 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             ls_params["ls_model_name"] = self.model_name

         # temperature
-        if "temperature" in kwargs and isinstance(kwargs["temperature"], float):
+        if "temperature" in kwargs and isinstance(kwargs["temperature"], (int, float)):
             ls_params["ls_temperature"] = kwargs["temperature"]
-        elif hasattr(self, "temperature") and isinstance(self.temperature, float):
+        elif hasattr(self, "temperature") and isinstance(
+            self.temperature, (int, float)
+        ):
             ls_params["ls_temperature"] = self.temperature

         # max_tokens
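A small side note on the widened check, as a general Python fact rather than anything specific to this patch: bool is a subclass of int, so a boolean temperature value would also satisfy the new isinstance test.

# bool subclasses int, so the widened (int, float) check accepts booleans too.
isinstance(True, (int, float))   # True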
@@ -1206,6 +1206,13 @@ def test_get_ls_params() -> None:
     ls_params = llm._get_ls_params(temperature=0.2)
     assert ls_params["ls_temperature"] == 0.2

+    # Test integer temperature values (regression test for issue #35300)
+    ls_params = llm._get_ls_params(temperature=0)
+    assert ls_params["ls_temperature"] == 0
+
+    ls_params = llm._get_ls_params(temperature=1)
+    assert ls_params["ls_temperature"] == 1
+
     ls_params = llm._get_ls_params(max_tokens=2048)
     assert ls_params["ls_max_tokens"] == 2048
@@ -272,6 +272,13 @@ def test_get_ls_params() -> None:
     ls_params = llm._get_ls_params(temperature=0.2)
     assert ls_params["ls_temperature"] == 0.2

+    # Test integer temperature values (regression test for issue #35300)
+    ls_params = llm._get_ls_params(temperature=0)
+    assert ls_params["ls_temperature"] == 0
+
+    ls_params = llm._get_ls_params(temperature=1)
+    assert ls_params["ls_temperature"] == 1
+
     ls_params = llm._get_ls_params(max_tokens=2048)
     assert ls_params["ls_max_tokens"] == 2048