Adds llm_chain_kwargs to BaseRetrievalQA.from_llm (#14224)

- **Description:** Adds an `llm_chain_kwargs` parameter to `BaseRetrievalQA.from_llm` so that additional keyword arguments can be forwarded to the underlying `LLMChain` when the chain is constructed (a usage sketch follows below).
- **Issue:** https://github.com/langchain-ai/langchain/issues/14216
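
Example usage, as a minimal sketch: the `ChatOpenAI` model, `FAISS` vector store, and `OpenAIEmbeddings` below are illustrative stand-ins (any LLM and retriever work the same way), and `verbose` is just one example of a keyword argument accepted by `LLMChain`.

```python
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

llm = ChatOpenAI()
retriever = FAISS.from_texts(
    ["Example context used for retrieval."], OpenAIEmbeddings()
).as_retriever()

# llm_chain_kwargs is unpacked into the LLMChain constructor, so any LLMChain
# keyword argument can be passed through, e.g. verbose=True to log the prompts.
qa = RetrievalQA.from_llm(
    llm=llm,
    retriever=retriever,
    llm_chain_kwargs={"verbose": True},
)

qa.run("What does the context say?")
```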

---------

Signed-off-by: ugm2 <unaigaraymaestre@gmail.com>
commit 6826feea14 (parent 6ce5dab38c)
Author: Unai Garay Maestre
Date: 2023-12-04 20:34:01 +01:00
Committed by: GitHub


@@ -68,11 +68,14 @@ class BaseRetrievalQA(Chain):
         llm: BaseLanguageModel,
         prompt: Optional[PromptTemplate] = None,
         callbacks: Callbacks = None,
+        llm_chain_kwargs: Optional[dict] = None,
         **kwargs: Any,
     ) -> BaseRetrievalQA:
         """Initialize from LLM."""
         _prompt = prompt or PROMPT_SELECTOR.get_prompt(llm)
-        llm_chain = LLMChain(llm=llm, prompt=_prompt, callbacks=callbacks)
+        llm_chain = LLMChain(
+            llm=llm, prompt=_prompt, callbacks=callbacks, **(llm_chain_kwargs or {})
+        )
         document_prompt = PromptTemplate(
             input_variables=["page_content"], template="Context:\n{page_content}"
         )