diff --git a/libs/langchain/langchain/llms/forefrontai.py b/libs/langchain/langchain/llms/forefrontai.py index 3664144be26..98eb6d571b6 100644 --- a/libs/langchain/langchain/llms/forefrontai.py +++ b/libs/langchain/langchain/llms/forefrontai.py @@ -1,12 +1,12 @@ from typing import Any, Dict, List, Mapping, Optional import requests -from langchain_core.pydantic_v1 import Extra, root_validator +from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import LLM from langchain.llms.utils import enforce_stop_tokens -from langchain.utils import get_from_dict_or_env +from langchain.utils import convert_to_secret_str, get_from_dict_or_env class ForefrontAI(LLM): @@ -41,7 +41,7 @@ class ForefrontAI(LLM): repetition_penalty: int = 1 """Penalizes repeated tokens according to frequency.""" - forefrontai_api_key: Optional[str] = None + forefrontai_api_key: Optional[SecretStr] = None base_url: Optional[str] = None """Base url to use, if None decides based on model name.""" @@ -54,10 +54,9 @@ class ForefrontAI(LLM): @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that api key exists in environment.""" - forefrontai_api_key = get_from_dict_or_env( - values, "forefrontai_api_key", "FOREFRONTAI_API_KEY" + values["forefrontai_api_key"] = convert_to_secret_str( + get_from_dict_or_env(values, "forefrontai_api_key", "FOREFRONTAI_API_KEY") ) - values["forefrontai_api_key"] = forefrontai_api_key return values @property @@ -102,10 +101,11 @@ class ForefrontAI(LLM): response = ForefrontAI("Tell me a joke.") """ + auth_value = f"Bearer {self.forefrontai_api_key.get_secret_value()}" response = requests.post( url=self.endpoint_url, headers={ - "Authorization": f"Bearer {self.forefrontai_api_key}", + "Authorization": auth_value, "Content-Type": "application/json", }, json={"text": prompt, **self._default_params, **kwargs}, diff --git 
a/libs/langchain/tests/unit_tests/llms/test_forefrontai.py b/libs/langchain/tests/unit_tests/llms/test_forefrontai.py new file mode 100644 index 00000000000..65560874b8f --- /dev/null +++ b/libs/langchain/tests/unit_tests/llms/test_forefrontai.py @@ -0,0 +1,50 @@ +"""Test ForefrontAI LLM.""" +from typing import cast + +from langchain_core.pydantic_v1 import SecretStr +from pytest import CaptureFixture, MonkeyPatch + +from langchain.llms.forefrontai import ForefrontAI + + +def test_forefrontai_api_key_is_secret_string() -> None: +    """Test that the API key is stored as a SecretStr.""" +    llm = ForefrontAI(forefrontai_api_key="secret-api-key", temperature=0.2) +    assert isinstance(llm.forefrontai_api_key, SecretStr) + + +def test_forefrontai_api_key_masked_when_passed_from_env( +    monkeypatch: MonkeyPatch, capsys: CaptureFixture +) -> None: +    """Test that the API key is masked when passed from an environment variable.""" +    monkeypatch.setenv("FOREFRONTAI_API_KEY", "secret-api-key") +    llm = ForefrontAI(temperature=0.2) +    print(llm.forefrontai_api_key, end="") +    captured = capsys.readouterr() + +    assert captured.out == "**********" + + +def test_forefrontai_api_key_masked_when_passed_via_constructor( +    capsys: CaptureFixture, +) -> None: +    """Test that the API key is masked when passed via the constructor.""" +    llm = ForefrontAI( +        forefrontai_api_key="secret-api-key", +        temperature=0.2, +    ) +    print(llm.forefrontai_api_key, end="") +    captured = capsys.readouterr() + +    assert captured.out == "**********" + + +def test_forefrontai_uses_actual_secret_value_from_secretstr() -> None: +    """Test that the actual secret value is correctly retrieved.""" +    llm = ForefrontAI( +        forefrontai_api_key="secret-api-key", +        temperature=0.2, +    ) +    assert ( +        cast(SecretStr, llm.forefrontai_api_key).get_secret_value() == "secret-api-key" +    )