infra: add more formatter rules to openai (#23189)

Turns on the Ruff formatter options [docstring-code-format](https://docs.astral.sh/ruff/settings/#format_docstring-code-format) and
[skip-magic-trailing-comma](https://docs.astral.sh/ruff/settings/#format_skip-magic-trailing-comma):

```toml
[tool.ruff.format]
docstring-code-format = true
skip-magic-trailing-comma = true
```
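
Roughly what the two options do (illustrative only; the function and values below are made up, not taken from this PR): `docstring-code-format` makes Ruff also format code examples embedded in docstrings, and `skip-magic-trailing-comma` keeps a trailing comma from forcing a call or literal onto one-argument-per-line, which is why the multi-line calls collapse in the diffs below.

```python
# Hypothetical snippet, not part of this PR: illustrates both options.


def get_client(api_key: str, base_url: str = "", timeout: float = 60.0) -> dict:
    """Build a client configuration.

    Example:

    .. code-block:: python

        # docstring-code-format = true runs the formatter on this block too
        config = get_client("sk-...", base_url="https://example.invalid")
    """
    # With skip-magic-trailing-comma = true, the trailing comma below no longer
    # pins the call to one-argument-per-line; Ruff collapses it to
    #     return dict(api_key=api_key, base_url=base_url, timeout=timeout)
    # whenever it fits within the configured line length.
    return dict(
        api_key=api_key,
        base_url=base_url,
        timeout=timeout,
    )
```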
Author: Bagatur
Date: 2024-06-19 11:39:58 -07:00
Committed by: GitHub
Parent: 710197e18c
Commit: 8698cb9b28
21 changed files with 290 additions and 445 deletions


@@ -1,7 +1,4 @@
 from langchain_openai.llms.azure import AzureOpenAI
 from langchain_openai.llms.base import OpenAI
 
-__all__ = [
-    "OpenAI",
-    "AzureOpenAI",
-]
+__all__ = ["OpenAI", "AzureOpenAI"]


@@ -117,10 +117,7 @@ class AzureOpenAI(BaseOpenAI):
"OPENAI_API_BASE"
)
values["openai_proxy"] = get_from_dict_or_env(
values,
"openai_proxy",
"OPENAI_PROXY",
default="",
values, "openai_proxy", "OPENAI_PROXY", default=""
)
values["openai_organization"] = (
values["openai_organization"]


@@ -173,10 +173,7 @@ class BaseOpenAI(BaseLLM):
"OPENAI_API_BASE"
)
values["openai_proxy"] = get_from_dict_or_env(
values,
"openai_proxy",
"OPENAI_PROXY",
default="",
values, "openai_proxy", "OPENAI_PROXY", default=""
)
values["openai_organization"] = (
values["openai_organization"]
@@ -365,11 +362,7 @@ class BaseOpenAI(BaseLLM):
                 if not system_fingerprint:
                     system_fingerprint = response.get("system_fingerprint")
         return self.create_llm_result(
-            choices,
-            prompts,
-            params,
-            token_usage,
-            system_fingerprint=system_fingerprint,
+            choices, prompts, params, token_usage, system_fingerprint=system_fingerprint
         )
 
     async def _agenerate(
@@ -425,11 +418,7 @@ class BaseOpenAI(BaseLLM):
                 choices.extend(response["choices"])
                 _update_token_usage(_keys, response, token_usage)
         return self.create_llm_result(
-            choices,
-            prompts,
-            params,
-            token_usage,
-            system_fingerprint=system_fingerprint,
+            choices, prompts, params, token_usage, system_fingerprint=system_fingerprint
         )
 
     def get_sub_prompts(