Mirror of https://github.com/hwchase17/langchain.git (synced 2025-08-13 22:59:05 +00:00).
Commit 3ca2c8d6c5 (parent 347fc49d4d): allow passing of stop params into openai (#232).
@ -82,7 +82,7 @@ class OpenAI(LLM, BaseModel):
|
|||||||
return values
|
return values
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def _default_params(self) -> Mapping[str, Any]:
|
def _default_params(self) -> Dict[str, Any]:
|
||||||
"""Get the default parameters for calling OpenAI API."""
|
"""Get the default parameters for calling OpenAI API."""
|
||||||
normal_params = {
|
normal_params = {
|
||||||
"temperature": self.temperature,
|
"temperature": self.temperature,
|
||||||
@ -115,7 +115,10 @@ class OpenAI(LLM, BaseModel):
|
|||||||
|
|
||||||
response = openai("Tell me a joke.")
|
response = openai("Tell me a joke.")
|
||||||
"""
|
"""
|
||||||
response = self.client.create(
|
params = self._default_params
|
||||||
model=self.model_name, prompt=prompt, stop=stop, **self._default_params
|
if stop is not None:
|
||||||
)
|
if "stop" in params:
|
||||||
|
raise ValueError("`stop` found in both the input and default params.")
|
||||||
|
params["stop"] = stop
|
||||||
|
response = self.client.create(model=self.model_name, prompt=prompt, **params)
|
||||||
return response["choices"][0]["text"]
|
return response["choices"][0]["text"]
|
||||||
|
@ -26,3 +26,21 @@ def test_openai_extra_kwargs() -> None:
|
|||||||
# Test that if provided twice it errors
|
# Test that if provided twice it errors
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
OpenAI(foo=3, model_kwargs={"foo": 2})
|
OpenAI(foo=3, model_kwargs={"foo": 2})
|
||||||
|
|
||||||
|
|
||||||
|
def test_openai_stop_valid() -> None:
    """Test openai stop logic on valid configuration."""
    prompt = "write an ordered list of five items"
    # Supplying the stop sequence at construction time and supplying it at
    # call time should produce equivalent completions.
    llm_with_default_stop = OpenAI(stop="3", temperature=0)
    llm_without_stop = OpenAI(temperature=0)
    output_from_default = llm_with_default_stop(prompt)
    output_from_call = llm_without_stop(prompt, stop=["3"])
    assert output_from_default == output_from_call
|
||||||
|
|
||||||
|
|
||||||
|
def test_openai_stop_error() -> None:
    """Test openai stop logic on bad configuration."""
    # A stop sequence is already baked into the model's default params, so
    # passing another one at call time is ambiguous and must raise.
    configured_llm = OpenAI(stop="3", temperature=0)
    with pytest.raises(ValueError):
        configured_llm("write an ordered list of five items", stop=["\n"])
|
||||||
|
Loading…
Reference in New Issue
Block a user