From 1c31234eed865e84036d2eb6f21653887a710947 Mon Sep 17 00:00:00 2001
From: Manuel Jaiczay
Date: Fri, 23 Aug 2024 00:29:46 +0200
Subject: [PATCH] community: fix HuggingFacePipeline pipeline_kwargs (#19920)

Fix handling of pipeline_kwargs to prioritize class attribute defaults. #19770

Co-authored-by: jaizo
Co-authored-by: Isaac Francisco <78627776+isahers1@users.noreply.github.com>
---
 .../langchain_community/llms/huggingface_pipeline.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/libs/community/langchain_community/llms/huggingface_pipeline.py b/libs/community/langchain_community/llms/huggingface_pipeline.py
index a1989702b0e..1ccbc76954a 100644
--- a/libs/community/langchain_community/llms/huggingface_pipeline.py
+++ b/libs/community/langchain_community/llms/huggingface_pipeline.py
@@ -261,7 +261,10 @@ class HuggingFacePipeline(BaseLLM):
     ) -> LLMResult:
         # List to hold all results
         text_generations: List[str] = []
-        pipeline_kwargs = kwargs.get("pipeline_kwargs", {})
+
+        default_pipeline_kwargs = self.pipeline_kwargs if self.pipeline_kwargs else {}
+        pipeline_kwargs = kwargs.get("pipeline_kwargs", default_pipeline_kwargs)
+
         skip_prompt = kwargs.get("skip_prompt", False)
         for i in range(0, len(prompts), self.batch_size):
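
Usage note (not part of the patch): a minimal sketch of the behavior this change targets, assuming construction via HuggingFacePipeline.from_model_id with a local text-generation model; the model id and generation kwargs below are illustrative choices, not taken from the PR.

# Illustrative sketch (not from the patch): pipeline_kwargs stored on the
# instance should now act as the default kwargs for the pipeline call in
# _generate, instead of _generate falling back to an empty dict.
from langchain_community.llms import HuggingFacePipeline

llm = HuggingFacePipeline.from_model_id(
    model_id="gpt2",                         # illustrative model choice
    task="text-generation",
    pipeline_kwargs={"max_new_tokens": 32},  # stored as self.pipeline_kwargs
)

# Before the fix, _generate used kwargs.get("pipeline_kwargs", {}) and so did
# not forward self.pipeline_kwargs to the pipeline call when the caller passed
# nothing; with the fix, those stored defaults apply unless the caller passes
# pipeline_kwargs explicitly at call time.
print(llm.invoke("Write one sentence about tokenizers."))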