Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-13 13:36:15 +00:00
core: Add ruff rules for pycodestyle Warning (W) (#26964)
All auto-fixes.
Committed by GitHub
parent 9404e7af9d
commit db8845a62a
@@ -20,16 +20,16 @@ class ChatResult(BaseModel):
    generations: list[ChatGeneration]
    """List of the chat generations.

    Generations is a list to allow for multiple candidate generations for a single
    input prompt.
    """
    llm_output: Optional[dict] = None
    """For arbitrary LLM provider specific output.

    This dictionary is a free-form dictionary that can contain any information that the
    provider wants to return. It is not standardized and is provider-specific.

    Users should generally avoid relying on this field and instead rely on
    accessing relevant information from standardized fields present in
    AIMessage.
@@ -26,8 +26,8 @@ class Generation(Serializable):
    """Generated text output."""

    generation_info: Optional[dict[str, Any]] = None
    """Raw response from the provider.

    May include things like the reason for finishing or token log probabilities.
    """
    type: Literal["Generation"] = "Generation"
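As a rough illustration, a completion-style provider might build a Generation like this; the generation_info keys are assumptions, since the docstring above says the payload is raw, provider-specific output:

    from langchain_core.outputs import Generation

    gen = Generation(
        text="Paris is the capital of France.",
        # Raw provider response details; exact keys depend on the provider.
        generation_info={"finish_reason": "stop", "logprobs": None},
    )
    print(gen.type)  # "Generation"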
@@ -22,25 +22,25 @@ class LLMResult(BaseModel):
        list[Union[Generation, ChatGeneration, GenerationChunk, ChatGenerationChunk]]
    ]
    """Generated outputs.

    The first dimension of the list represents completions for different input
    prompts.

    The second dimension of the list represents different candidate generations
    for a given prompt.

    When returned from an LLM the type is List[List[Generation]].
    When returned from a chat model the type is List[List[ChatGeneration]].

    ChatGeneration is a subclass of Generation that has a field for a structured
    chat message.
    """
    llm_output: Optional[dict] = None
    """For arbitrary LLM provider specific output.

    This dictionary is a free-form dictionary that can contain any information that the
    provider wants to return. It is not standardized and is provider-specific.

    Users should generally avoid relying on this field and instead rely on
    accessing relevant information from standardized fields present in
    AIMessage.
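A small sketch of the two-dimensional shape described above, assuming two input prompts with one candidate generation each (all values are illustrative):

    from langchain_core.outputs import Generation, LLMResult

    result = LLMResult(
        generations=[
            [Generation(text="completion for prompt 0")],
            [Generation(text="completion for prompt 1")],
        ],
        llm_output={"model_name": "example-model"},  # provider-specific, illustrative
    )

    # First index selects the input prompt, second the candidate generation.
    print(result.generations[1][0].text)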