core: Add ruff rules TRY (tryceratops) (#29388)

TRY004 ("use TypeError rather than ValueError") existing errors are
marked as ignore to preserve backward compatibility.
LMK if you prefer to fix some of them.
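For context, TRY004 fires when a type check raises ValueError even though TypeError is the conventional exception for a wrong type. A minimal sketch of what the rule flags (set_name is a made-up example, not code from this PR):

    def set_name(value):
        if not isinstance(value, str):
            # TRY004: a failed isinstance() check signals a type problem,
            # so the rule suggests `raise TypeError(...)` here instead.
            raise ValueError(f"Expected str, got {type(value)}")

The existing ValueErrors below keep their type and carry a # noqa: TRY004 marker so that callers catching ValueError keep working.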

Co-authored-by: Erick Friis <erick@langchain.dev>
Christophe Bornet 2025-01-24 06:01:40 +01:00 committed by GitHub
parent 723b603f52
commit dbb6b7b103
26 changed files with 138 additions and 126 deletions

View File

@@ -127,7 +127,7 @@ def trace_as_chain_group(
except Exception as e:
if not group_cm.ended:
run_manager.on_chain_error(e)
raise e
raise
else:
if not group_cm.ended:
run_manager.on_chain_end({})
@@ -207,7 +207,7 @@ async def atrace_as_chain_group(
except Exception as e:
if not group_cm.ended:
await run_manager.on_chain_error(e)
raise e
raise
else:
if not group_cm.ended:
await run_manager.on_chain_end({})
@@ -289,7 +289,7 @@ def handle_event(
f" {repr(e)}"
)
if handler.raise_error:
raise e
raise
finally:
if coros:
try:
@@ -388,7 +388,7 @@ async def _ahandle_event_for_handler(
f"Error in {handler.__class__.__name__}.{event_name} callback: {repr(e)}"
)
if handler.raise_error:
raise e
raise
async def ahandle_event(
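The raise e → raise changes in this file follow TRY201 (verbose re-raise): inside an except block, a bare raise re-raises the active exception with its traceback untouched, while raise e appends an extra traceback entry pointing at the re-raise line. A minimal sketch, with risky_call and handler as hypothetical stand-ins for the callback plumbing above:

    try:
        risky_call()
    except Exception as e:
        handler.on_error(e)  # report first...
        raise  # ...then re-raise; the traceback still points into risky_call()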

View File

@@ -268,7 +268,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
f"Invalid input type {type(input)}. "
"Must be a PromptValue, str, or list of BaseMessages."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
def invoke(
self,
@@ -407,9 +407,6 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
generation = chunk
else:
generation += chunk
if generation is None:
msg = "No generation chunks were returned"
raise ValueError(msg)
except BaseException as e:
run_manager.on_llm_error(
e,
@@ -417,9 +414,14 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
generations=[[generation]] if generation else []
),
)
raise e
else:
run_manager.on_llm_end(LLMResult(generations=[[generation]]))
raise
if generation is None:
err = ValueError("No generation chunks were returned")
run_manager.on_llm_error(err, response=LLMResult(generations=[]))
raise err
run_manager.on_llm_end(LLMResult(generations=[[generation]]))
async def astream(
self,
@@ -485,19 +487,21 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
generation = chunk
else:
generation += chunk
if generation is None:
msg = "No generation chunks were returned"
raise ValueError(msg)
except BaseException as e:
await run_manager.on_llm_error(
e,
response=LLMResult(generations=[[generation]] if generation else []),
)
raise e
else:
await run_manager.on_llm_end(
LLMResult(generations=[[generation]]),
)
raise
if generation is None:
err = ValueError("No generation chunks were returned")
await run_manager.on_llm_error(err, response=LLMResult(generations=[]))
raise err
await run_manager.on_llm_end(
LLMResult(generations=[[generation]]),
)
# --- Custom methods ---
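In both streaming hunks above, the "no generation chunks" check moves out of the try block. Raising a ValueError inside the try just so the blanket except BaseException handler would report it is the raise-and-catch-yourself pattern TRY301 flags; the check now runs after the loop and reports and raises the error explicitly. A minimal sketch of the resulting shape (iter_chunks, on_error, and on_end are hypothetical stand-ins for the run-manager calls):

    generation = None
    try:
        for chunk in iter_chunks():
            generation = chunk if generation is None else generation + chunk
    except BaseException as e:
        on_error(e)  # report streaming failures
        raise        # TRY201: bare re-raise
    if generation is None:  # checked outside the try, reported exactly once
        err = ValueError("No generation chunks were returned")
        on_error(err)
        raise err
    on_end(generation)  # success path needs no else block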
@@ -641,7 +645,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
except BaseException as e:
if run_managers:
run_managers[i].on_llm_error(e, response=LLMResult(generations=[]))
raise e
raise
flattened_outputs = [
LLMResult(generations=[res.generations], llm_output=res.llm_output) # type: ignore[list-item]
for res in results
@@ -1022,7 +1026,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
return generation.message
else:
msg = "Unexpected generation type"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
async def _call_async(
self,
@@ -1039,7 +1043,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
return generation.message
else:
msg = "Unexpected generation type"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
@deprecated("0.1.7", alternative="invoke", removal="1.0")
def call_as_llm(
@@ -1057,7 +1061,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
return result.content
else:
msg = "Cannot use predict when output is not a string."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
@deprecated("0.1.7", alternative="invoke", removal="1.0")
def predict_messages(
@@ -1082,7 +1086,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
return result.content
else:
msg = "Cannot use predict when output is not a string."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
@deprecated("0.1.7", alternative="ainvoke", removal="1.0")
async def apredict_messages(

View File

@@ -242,7 +242,7 @@ class GenericFakeChatModel(BaseChatModel):
f"Expected generate to return a ChatResult, "
f"but got {type(chat_result)} instead."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
message = chat_result.generations[0].message
@@ -251,7 +251,7 @@
f"Expected invoke to return an AIMessage, "
f"but got {type(message)} instead."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
content = message.content

View File

@@ -337,7 +337,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
f"Invalid input type {type(input)}. "
"Must be a PromptValue, str, or list of BaseMessages."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
def _get_ls_params(
self,
@@ -448,7 +448,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
if return_exceptions:
return cast(list[str], [e for _ in inputs])
else:
raise e
raise
else:
batches = [
inputs[i : i + max_concurrency]
@@ -494,7 +494,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
if return_exceptions:
return cast(list[str], [e for _ in inputs])
else:
raise e
raise
else:
batches = [
inputs[i : i + max_concurrency]
@@ -562,9 +562,6 @@ class BaseLLM(BaseLanguageModel[str], ABC):
generation = chunk
else:
generation += chunk
if generation is None:
msg = "No generation chunks were returned"
raise ValueError(msg)
except BaseException as e:
run_manager.on_llm_error(
e,
@@ -572,9 +569,14 @@ class BaseLLM(BaseLanguageModel[str], ABC):
generations=[[generation]] if generation else []
),
)
raise e
else:
run_manager.on_llm_end(LLMResult(generations=[[generation]]))
raise
if generation is None:
err = ValueError("No generation chunks were returned")
run_manager.on_llm_error(err, response=LLMResult(generations=[]))
raise err
run_manager.on_llm_end(LLMResult(generations=[[generation]]))
async def astream(
self,
@@ -632,17 +634,19 @@ class BaseLLM(BaseLanguageModel[str], ABC):
generation = chunk
else:
generation += chunk
if generation is None:
msg = "No generation chunks were returned"
raise ValueError(msg)
except BaseException as e:
await run_manager.on_llm_error(
e,
response=LLMResult(generations=[[generation]] if generation else []),
)
raise e
else:
await run_manager.on_llm_end(LLMResult(generations=[[generation]]))
raise
if generation is None:
err = ValueError("No generation chunks were returned")
await run_manager.on_llm_error(err, response=LLMResult(generations=[]))
raise err
await run_manager.on_llm_end(LLMResult(generations=[[generation]]))
# --- Custom methods ---
@@ -790,7 +794,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
except BaseException as e:
for run_manager in run_managers:
run_manager.on_llm_error(e, response=LLMResult(generations=[]))
raise e
raise
flattened_outputs = output.flatten()
for manager, flattened_output in zip(run_managers, flattened_outputs):
manager.on_llm_end(flattened_output)
@@ -850,7 +854,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
"Argument 'prompts' is expected to be of type List[str], received"
f" argument of type {type(prompts)}."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
# Create callback managers
if isinstance(metadata, list):
metadata = [
@@ -1036,7 +1040,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
for run_manager in run_managers
]
)
raise e
raise
flattened_outputs = output.flatten()
await asyncio.gather(
*[
@@ -1289,7 +1293,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
f"{type(prompt)}. If you want to run the LLM on multiple prompts, use "
"`generate` instead."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return (
self.generate(
[prompt],

View File

@@ -363,6 +363,17 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
return self
tool_calls = []
invalid_tool_calls = []
def add_chunk_to_invalid_tool_calls(chunk: ToolCallChunk) -> None:
invalid_tool_calls.append(
create_invalid_tool_call(
name=chunk["name"],
args=chunk["args"],
id=chunk["id"],
error=None,
)
)
for chunk in self.tool_call_chunks:
try:
args_ = parse_partial_json(chunk["args"]) if chunk["args"] != "" else {} # type: ignore[arg-type]
@@ -375,17 +386,9 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
)
)
else:
msg = "Malformed args."
raise ValueError(msg)
add_chunk_to_invalid_tool_calls(chunk)
except Exception:
invalid_tool_calls.append(
create_invalid_tool_call(
name=chunk["name"],
args=chunk["args"],
id=chunk["id"],
error=None,
)
)
add_chunk_to_invalid_tool_calls(chunk)
self.tool_calls = tool_calls
self.invalid_tool_calls = invalid_tool_calls
return self
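These hunks fix the same TRY301 pattern in AIMessageChunk: the old code raised ValueError("Malformed args.") inside the try purely so its own except would record the chunk as an invalid tool call. The new add_chunk_to_invalid_tool_calls helper is instead called directly on both failure paths. A stripped-down sketch of the idea (parse, record_valid, and record_invalid are hypothetical stand-ins):

    for chunk in chunks:
        try:
            args = parse(chunk)  # may raise on malformed input
            if isinstance(args, dict):
                record_valid(chunk, args)
            else:
                record_invalid(chunk)  # was: raise ValueError("Malformed args.")
        except Exception:
            record_invalid(chunk)  # parse failures land here too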

View File

@@ -124,7 +124,7 @@ def get_buffer_string(
role = m.role
else:
msg = f"Got unsupported message type: {m}"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
message = f"{role}: {m.content}"
if isinstance(m, AIMessage) and "function_call" in m.additional_kwargs:
message += f"{m.additional_kwargs['function_call']}"
@@ -1400,7 +1400,7 @@ def _get_message_openai_role(message: BaseMessage) -> str:
return message.role
else:
msg = f"Unknown BaseMessage type {message.__class__}."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
def _convert_to_openai_tool_calls(tool_calls: list[ToolCall]) -> list[dict]:

View File

@@ -282,19 +282,20 @@ class PydanticToolsParser(JsonOutputToolsParser):
name_dict = {tool.__name__: tool for tool in self.tools}
pydantic_objects = []
for res in json_results:
try:
if not isinstance(res["args"], dict):
msg = (
f"Tool arguments must be specified as a dict, received: "
f"{res['args']}"
)
raise ValueError(msg)
pydantic_objects.append(name_dict[res["type"]](**res["args"]))
except (ValidationError, ValueError) as e:
if not isinstance(res["args"], dict):
if partial:
continue
else:
raise e
msg = (
f"Tool arguments must be specified as a dict, received: "
f"{res['args']}"
)
raise ValueError(msg)
try:
pydantic_objects.append(name_dict[res["type"]](**res["args"]))
except (ValidationError, ValueError):
if partial:
continue
raise
if self.first_tool_only:
return pydantic_objects[0] if pydantic_objects else None
else:

View File

@@ -66,10 +66,10 @@ class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]):
try:
json_object = super().parse_result(result)
return self._parse_obj(json_object)
except OutputParserException as e:
except OutputParserException:
if partial:
return None
raise e
raise
def parse(self, text: str) -> TBaseModel:
"""Parse the output of an LLM call to a pydantic object.

View File

@@ -244,7 +244,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
f"variable {self.variable_name} should be a list of base messages, "
f"got {value} of type {type(value)}"
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
value = convert_to_messages(value)
if self.n_messages:
value = value[-self.n_messages :]
@@ -577,7 +577,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
return cls(prompt=prompt, **kwargs)
else:
msg = f"Invalid template: {template}"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
@classmethod
def from_template_file(
@@ -1225,7 +1225,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
result.extend(message)
else:
msg = f"Unexpected input: {message_template}"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return result
async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
@@ -1253,7 +1253,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
result.extend(message)
else:
msg = f"Unexpected input: {message_template}"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return result
def partial(self, **kwargs: Any) -> ChatPromptTemplate:
@@ -1399,7 +1399,7 @@ def _create_template_from_message_type(
var_name_wrapped, is_optional = template
if not isinstance(var_name_wrapped, str):
msg = f"Expected variable name to be a string. Got: {var_name_wrapped}"
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
if var_name_wrapped[0] != "{" or var_name_wrapped[-1] != "}":
msg = (
f"Invalid placeholder template: {var_name_wrapped}."

View File

@@ -78,7 +78,7 @@ def _load_examples(config: dict) -> dict:
config["examples"] = examples
else:
msg = "Invalid examples format. Only list or string are supported."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return config

View File

@@ -262,7 +262,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
result = self._get_relevant_documents(input, **_kwargs)
except Exception as e:
run_manager.on_retriever_error(e)
raise e
raise
else:
run_manager.on_retriever_end(
result,
@@ -325,7 +325,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
result = await self._aget_relevant_documents(input, **_kwargs)
except Exception as e:
await run_manager.on_retriever_error(e)
raise e
raise
else:
await run_manager.on_retriever_end(
result,

View File

@@ -188,7 +188,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
last_error = e
except BaseException as e:
run_manager.on_chain_error(e)
raise e
raise
else:
run_manager.on_chain_end(output)
return output
@@ -241,7 +241,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
last_error = e
except BaseException as e:
await run_manager.on_chain_error(e)
raise e
raise
else:
await run_manager.on_chain_end(output)
return output
@@ -488,7 +488,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
last_error = e
except BaseException as e:
run_manager.on_chain_error(e)
raise e
raise
else:
first_error = None
break
@@ -507,7 +507,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
output = None
except BaseException as e:
run_manager.on_chain_error(e)
raise e
raise
run_manager.on_chain_end(output)
async def astream(
@@ -558,7 +558,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
last_error = e
except BaseException as e:
await run_manager.on_chain_error(e)
raise e
raise
else:
first_error = None
break
@@ -577,7 +577,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
output = None
except BaseException as e:
await run_manager.on_chain_error(e)
raise e
raise
await run_manager.on_chain_end(output)
def __getattr__(self, name: str) -> Any:

View File

@@ -50,9 +50,9 @@ def is_uuid(value: str) -> bool:
"""
try:
UUID(value)
return True
except ValueError:
return False
return True
class Edge(NamedTuple):
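This hunk is TRY300 ("consider moving this statement to an else block"): a return inside the try body gets flagged so the guarded region stays minimal. Reconstructed from the hunk, the function now reads:

    def is_uuid(value: str) -> bool:
        try:
            UUID(value)
        except ValueError:
            return False
        return True

Only UUID(value) can raise the ValueError being handled, so the try now guards exactly that call.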

View File

@@ -481,7 +481,7 @@ class RunnableWithMessageHistory(RunnableBindingBase):
f"Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]. "
f"Got {input_val}."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
def _get_output_messages(
self, output_val: Union[str, BaseMessage, Sequence[BaseMessage], dict]
@@ -517,7 +517,7 @@
f"Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]. "
f"Got {output_val}."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
def _enter_history(self, input: Any, config: RunnableConfig) -> list[BaseMessage]:
hist: BaseChatMessageHistory = config["configurable"]["message_history"]

View File

@@ -474,7 +474,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
) -> dict[str, Any]:
if not isinstance(input, dict):
msg = "The input to RunnablePassthrough.assign() must be a dict."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return {
**input,
@@ -502,7 +502,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
) -> dict[str, Any]:
if not isinstance(input, dict):
msg = "The input to RunnablePassthrough.assign() must be a dict."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return {
**input,
@@ -555,7 +555,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
for chunk in for_passthrough:
if not isinstance(chunk, dict):
msg = "The input to RunnablePassthrough.assign() must be a dict."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
# remove mapper keys from passthrough chunk, to be overwritten by map
filtered = AddableDict(
{k: v for k, v in chunk.items() if k not in mapper_keys}
@@ -605,7 +605,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
async for chunk in for_passthrough:
if not isinstance(chunk, dict):
msg = "The input to RunnablePassthrough.assign() must be a dict."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
# remove mapper keys from passthrough chunk, to be overwritten by map output
filtered = AddableDict(
@@ -708,7 +708,7 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]):
def _pick(self, input: dict[str, Any]) -> Any:
if not isinstance(input, dict):
msg = "The input to RunnablePassthrough.assign() must be a dict."
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
if isinstance(self.keys, str):
return input.get(self.keys)

View File

@@ -397,9 +397,9 @@ def get_lambda_source(func: Callable) -> Optional[str]:
tree = ast.parse(textwrap.dedent(code))
visitor = GetLambdaSource()
visitor.visit(tree)
return visitor.source if visitor.count == 1 else name
except (SyntaxError, TypeError, OSError, SystemError):
return name
return visitor.source if visitor.count == 1 else name
@lru_cache(maxsize=256)
@@ -440,10 +440,11 @@ def get_function_nonlocals(func: Callable) -> list[Any]:
break
else:
values.append(vv)
return values
except (SyntaxError, TypeError, OSError, SystemError):
return []
return values
def indent_lines_after_first(text: str, prefix: str) -> str:
"""Indent all lines of text after the first line.

View File

@@ -680,6 +680,7 @@ class ChildTool(BaseTool):
content = None
artifact = None
status = "success"
error_to_raise: Union[Exception, KeyboardInterrupt, None] = None
try:
child_config = patch_config(config, callbacks=run_manager.get_child())
@@ -699,26 +700,25 @@ class ChildTool(BaseTool):
f"expected. Instead generated response of type: "
f"{type(response)}."
)
raise ValueError(msg)
content, artifact = response
error_to_raise = ValueError(msg)
else:
content, artifact = response
else:
content = response
status = "success"
except (ValidationError, ValidationErrorV1) as e:
if not self.handle_validation_error:
error_to_raise = e
else:
content = _handle_validation_error(e, flag=self.handle_validation_error)
status = "error"
status = "error"
except ToolException as e:
if not self.handle_tool_error:
error_to_raise = e
else:
content = _handle_tool_error(e, flag=self.handle_tool_error)
status = "error"
status = "error"
except (Exception, KeyboardInterrupt) as e:
error_to_raise = e
status = "error"
if error_to_raise:
run_manager.on_tool_error(error_to_raise)
@@ -789,6 +789,7 @@ class ChildTool(BaseTool):
)
content = None
artifact = None
status = "success"
error_to_raise: Optional[Union[Exception, KeyboardInterrupt]] = None
try:
tool_args, tool_kwargs = self._to_args_and_kwargs(tool_input, tool_call_id)
@@ -816,26 +817,25 @@ class ChildTool(BaseTool):
f"expected. Instead generated response of type: "
f"{type(response)}."
)
raise ValueError(msg)
content, artifact = response
error_to_raise = ValueError(msg)
else:
content, artifact = response
else:
content = response
status = "success"
except ValidationError as e:
if not self.handle_validation_error:
error_to_raise = e
else:
content = _handle_validation_error(e, flag=self.handle_validation_error)
status = "error"
status = "error"
except ToolException as e:
if not self.handle_tool_error:
error_to_raise = e
else:
content = _handle_tool_error(e, flag=self.handle_tool_error)
status = "error"
status = "error"
except (Exception, KeyboardInterrupt) as e:
error_to_raise = e
status = "error"
if error_to_raise:
await run_manager.on_tool_error(error_to_raise)
@@ -873,7 +873,7 @@ def _handle_validation_error(
f"Got unexpected type of `handle_validation_error`. Expected bool, "
f"str or callable. Received: {flag}"
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return content
@@ -893,7 +893,7 @@ def _handle_tool_error(
f"Got unexpected type of `handle_tool_error`. Expected bool, str "
f"or callable. Received: {flag}"
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return content
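Both tool-run paths above swap raise-inside-try (TRY301) for an error_to_raise accumulator: each except branch either converts the error to content (when the corresponding handler flag is set) or records it, and a single block after the try reports it to the run manager and re-raises. A minimal sketch of the pattern (run_tool, well_formed, handle_tool_error, and on_tool_error are hypothetical stand-ins):

    content = None
    status = "success"
    error_to_raise = None
    try:
        content = run_tool()
        if not well_formed(content):
            error_to_raise = ValueError("unexpected response type")
    except ToolException as e:
        if handle_tool_error:
            content = str(e)  # converted to tool output instead of raised
        else:
            error_to_raise = e
        status = "error"
    if error_to_raise:
        on_tool_error(error_to_raise)
        raise error_to_raise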

View File

@@ -139,7 +139,7 @@ class EvaluatorCallbackHandler(BaseTracer):
f"{evaluator.__class__.__name__}: {repr(e)}",
exc_info=True,
)
raise e
raise
example_id = str(run.reference_example_id)
with self.lock:
for res in eval_results:

View File

@@ -97,8 +97,7 @@ def extract_sub_links(
if continue_on_failure:
logger.warning(f"Unable to load link {link}. Raised exception:\n\n{e}")
continue
else:
raise e
raise
results = []
for path in absolute_paths:

View File

@@ -53,13 +53,14 @@ def grab_literal(template: str, l_del: str) -> tuple[str, str]:
# Look for the next tag and move the template to it
literal, template = template.split(l_del, 1)
_CURRENT_LINE += literal.count("\n")
return (literal, template)
# There are no more tags in the template?
except ValueError:
# Then the rest of the template is a literal
return (template, "")
return (literal, template)
def l_sa_check(template: str, literal: str, is_standalone: bool) -> bool:
"""Do a preliminary check to see if a tag could be a standalone.

View File

@@ -33,5 +33,5 @@ def _dict_int_op(
msg = (
f"Unknown value types: {types}. Only dict and int values are supported."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: TRY004
return combined

View File

@@ -54,11 +54,6 @@ def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
raise ValueError(msg)
try:
import simsimd as simd # type: ignore
x = np.array(x, dtype=np.float32)
y = np.array(y, dtype=np.float32)
z = 1 - np.array(simd.cdist(x, y, metric="cosine"))
return z
except ImportError:
logger.debug(
"Unable to import simsimd, defaulting to NumPy implementation. If you want "
@@ -72,6 +67,10 @@ def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0
return similarity
x = np.array(x, dtype=np.float32)
y = np.array(y, dtype=np.float32)
return 1 - np.array(simd.cdist(x, y, metric="cosine"))
def maximal_marginal_relevance(
query_embedding: np.ndarray,
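The vectorstore hunks shrink the try to just the import; the return z inside the old try is what TRY300 flags. After the change, the except ImportError branch logs and returns the NumPy fallback, and the simsimd path runs only after a clean import, roughly:

    try:
        import simsimd as simd
    except ImportError:
        ...  # log, then compute and return the NumPy fallback
    x = np.array(x, dtype=np.float32)
    y = np.array(y, dtype=np.float32)
    return 1 - np.array(simd.cdist(x, y, metric="cosine"))

Keeping the guarded region this small makes it obvious that only the import can trigger the fallback.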

View File

@@ -44,7 +44,7 @@ python = ">=3.12.4"
[tool.poetry.extras]
[tool.ruff.lint]
select = [ "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TID", "UP", "W", "YTT",]
select = [ "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TID", "TRY", "UP", "W", "YTT",]
ignore = [ "COM812", "UP007", "S110", "S112",]
[tool.coverage.run]

View File

@@ -103,7 +103,7 @@ def _runnable(inputs: dict) -> str:
if inputs["text"] == "bar":
return "second"
if isinstance(inputs["exception"], ValueError):
raise RuntimeError
raise RuntimeError # noqa: TRY004
return "third"

View File

@@ -1600,7 +1600,7 @@ async def test_event_stream_with_retry() -> None:
def fail(inputs: str) -> None:
"""Simple func."""
msg = "fail"
raise Exception(msg)
raise ValueError(msg)
chain = RunnableLambda(success) | RunnableLambda(fail).with_retry(
stop_after_attempt=1,
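This test tweak (vanilla Exception → ValueError) addresses TRY002, which flags raising the bare Exception class because callers can only catch it with an equally broad handler. A minimal sketch of the alternatives the rule prefers (ChainTestError is a made-up name):

    class ChainTestError(Exception):
        """Custom exception type, one TRY002-compliant option."""

    def fail(inputs: str) -> None:
        msg = "fail"
        raise ValueError(msg)  # or: raise ChainTestError(msg)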

View File

@@ -1556,7 +1556,7 @@ async def test_event_stream_with_retry() -> None:
def fail(inputs: str) -> None:
"""Simple func."""
msg = "fail"
raise Exception(msg)
raise ValueError(msg)
chain = RunnableLambda(success) | RunnableLambda(fail).with_retry(
stop_after_attempt=1,
@@ -1906,7 +1906,7 @@ async def test_runnable_with_message_history() -> None:
return fn(*args, **kwargs)
except Exception as e:
raised_errors.append(e)
raise e
raise
return _get_output_messages
@@ -2097,7 +2097,7 @@ class StreamingRunnable(Runnable[Input, Output]):
final_output = None
for element in self.iterable:
if isinstance(element, BaseException):
raise element
raise element # noqa: TRY301
yield element
if final_output is None:
@@ -2409,10 +2409,10 @@ async def test_break_astream_events() -> None:
self.started = True
try:
await asyncio.sleep(0.5)
return input
except asyncio.CancelledError:
self.cancelled = True
raise
return input
def reset(self) -> None:
self.started = False
@@ -2474,10 +2474,10 @@ async def test_cancel_astream_events() -> None:
self.started = True
try:
await asyncio.sleep(0.5)
return input
except asyncio.CancelledError:
self.cancelled = True
raise
return input
def reset(self) -> None:
self.started = False