mirror of https://github.com/hwchase17/langchain.git
synced 2025-07-10 15:06:18 +00:00

Wfh/allow nonparallel (#10914)

This commit is contained in:
parent bb3e6cb427
commit ee8653f62c
@@ -137,19 +137,6 @@ def _wrap_in_chain_factory(
                 "(memory=new_memory, ...)\n\n"
                 f'run_on_dataset("{dataset_name}", chain_constructor, ...)'
             )
-        logger.warning(
-            "Directly passing in a chain is not recommended as chains may have state."
-            " This can lead to unexpected behavior as the "
-            "same chain instance could be used across multiple datasets. Instead,"
-            " please pass a chain constructor that creates a new "
-            "chain with fresh memory each time it is called. This will safeguard"
-            " against information leakage between dataset examples. "
-            "\nFor example:\n\n"
-            "def chain_constructor():\n"
-            f"    return {chain_class}(memory=new_memory, ...)\n\n"
-            f'run_on_dataset("{dataset_name}", chain_constructor, ...)'
-        )
-
         return lambda: chain
     elif isinstance(llm_or_chain_factory, BaseLanguageModel):
         return llm_or_chain_factory
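This first hunk removes the logger.warning that fired when a memory-free Chain instance was passed to run_on_dataset directly; with this commit that is simply allowed. The ValueError whose message forms the context lines above still rejects chains that carry stateful memory and asks for a chain constructor instead. Below is a minimal sketch of that constructor pattern; the chain class, memory class, and dataset name are illustrative placeholders, not part of this commit, and the exact run_on_dataset signature may differ across LangChain versions.

from langchain.chains import ConversationChain
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.smith import run_on_dataset
from langsmith import Client


def chain_constructor() -> ConversationChain:
    # Build a fresh memory object on every call so no state leaks between
    # dataset examples.
    return ConversationChain(llm=OpenAI(), memory=ConversationBufferMemory())


# "my-dataset" must already exist in LangSmith; the client is passed first.
run_on_dataset(Client(), "my-dataset", chain_constructor)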
@@ -653,12 +640,9 @@ async def _arun_chain(
 ) -> Union[dict, str]:
     """Run a chain asynchronously on inputs."""
     inputs_ = inputs if input_mapper is None else input_mapper(inputs)
-    if isinstance(chain, Chain):
-        if isinstance(inputs_, dict) and len(inputs_) == 1:
-            val = next(iter(inputs_.values()))
-            output = await chain.acall(val, callbacks=callbacks, tags=tags)
-        else:
-            output = await chain.acall(inputs_, callbacks=callbacks, tags=tags)
+    if isinstance(chain, Chain) and isinstance(inputs_, dict) and len(inputs_) == 1:
+        val = next(iter(inputs_.values()))
+        output = await chain.acall(val, callbacks=callbacks, tags=tags)
     else:
         runnable_config = RunnableConfig(tags=tags or [], callbacks=callbacks)
         output = await chain.ainvoke(inputs_, config=runnable_config)
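In _arun_chain the two nested isinstance checks collapse into a single condition: the acall shortcut is taken only for a Chain called with a single-key input dict, while multi-key Chain inputs and plain Runnables both fall through to ainvoke with a RunnableConfig. Because Chain implements the Runnable interface in this version of LangChain, the chain still executes on the second path; only the call surface changes. A simplified sketch of the resulting dispatch, not the exact source:

from typing import Any, Dict, Union

from langchain.chains.base import Chain
from langchain.schema.runnable import Runnable, RunnableConfig


async def dispatch(chain: Union[Chain, Runnable], inputs_: Dict[str, Any]) -> Any:
    if isinstance(chain, Chain) and isinstance(inputs_, dict) and len(inputs_) == 1:
        # Single-key dict on a Chain: unwrap the value and use the legacy
        # acall interface.
        return await chain.acall(next(iter(inputs_.values())))
    # Multi-key Chain inputs and arbitrary Runnables both go through ainvoke.
    return await chain.ainvoke(inputs_, config=RunnableConfig())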
@@ -781,12 +765,9 @@ def _run_chain(
 ) -> Union[Dict, str]:
     """Run a chain on inputs."""
     inputs_ = inputs if input_mapper is None else input_mapper(inputs)
-    if isinstance(chain, Chain):
-        if isinstance(inputs_, dict) and len(inputs_) == 1:
-            val = next(iter(inputs_.values()))
-            output = chain(val, callbacks=callbacks, tags=tags)
-        else:
-            output = chain(inputs_, callbacks=callbacks, tags=tags)
+    if isinstance(chain, Chain) and isinstance(inputs_, dict) and len(inputs_) == 1:
+        val = next(iter(inputs_.values()))
+        output = chain(val, callbacks=callbacks, tags=tags)
     else:
         runnable_config = RunnableConfig(tags=tags or [], callbacks=callbacks)
         output = chain.invoke(inputs_, config=runnable_config)
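_run_chain gets the same restructuring for the synchronous path, with chain(val, ...) and chain.invoke(...) in place of the awaited calls. One practical consequence is that anything that is not a Chain goes straight to invoke, so a bare Runnable can be evaluated through this helper. The snippet below only illustrates that invoke-with-config call on a stand-in RunnableLambda; it is an assumption-laden sketch, not code from this commit:

from langchain.schema.runnable import RunnableConfig, RunnableLambda

# A trivial Runnable standing in for a real pipeline.
shout = RunnableLambda(lambda inputs: inputs["text"].upper())

config = RunnableConfig(tags=["example"])
print(shout.invoke({"text": "hello"}, config=config))  # prints "HELLO"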
@@ -1251,6 +1232,17 @@ def run_on_dataset(
         input_mapper,
         concurrency_level,
     )
-    with runnable_config.get_executor_for_config(configs[0]) as executor:
-        batch_results = list(
-            executor.map(
+    if concurrency_level == 0:
+        batch_results = [
+            _run_llm_or_chain(
+                example,
+                config,
+                llm_or_chain_factory=wrapped_model,
+                input_mapper=input_mapper,
+            )
+            for example, config in zip(examples, configs)
+        ]
+    else:
+        with runnable_config.get_executor_for_config(configs[0]) as executor:
+            batch_results = list(
+                executor.map(
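The run_on_dataset hunk is the change the PR title points at: when concurrency_level is 0, the examples are run one at a time in the calling thread through a plain list comprehension, which helps when debugging or when a chain is not thread-safe; any other value keeps the previous executor-based path. A hedged usage sketch follows; the dataset name and evaluator choice are placeholders, and chain_constructor is the factory from the earlier sketch:

from langchain.smith import RunEvalConfig, run_on_dataset
from langsmith import Client

results = run_on_dataset(
    Client(),
    "my-dataset",                # placeholder dataset name
    chain_constructor,           # chain factory as sketched above
    evaluation=RunEvalConfig(evaluators=["qa"]),
    concurrency_level=0,         # run examples serially, without an executor
)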