Mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-19 13:23:35 +00:00)
experimental: Add config to convert_to_graph_documents (#24012)
PR title: Experimental: Add config to convert_to_graph_documents

Description: In order to use Langfuse, I need to pass the Langfuse configuration when invoking the chain. langchain_experimental does not allow passing any parameters (besides the documents) to the convert_to_graph_documents method, so I cannot monitor the chain in Langfuse.

If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17.

---------

Co-authored-by: Catarina Franco <catarina.franco@criticalsoftware.com>
Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in: parent f2d810b3c0, commit b01d938997
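As a rough illustration of the intended use, here is a minimal sketch that passes a callback-carrying config through the new parameter. The Langfuse `CallbackHandler` import, the OpenAI model, and the sample document are illustrative assumptions, not part of this change:

from langchain_core.documents import Document
from langchain_experimental.graph_transformers import LLMGraphTransformer
from langchain_openai import ChatOpenAI
from langfuse.callback import CallbackHandler  # assumed: Langfuse's LangChain callback handler

llm = ChatOpenAI(model="gpt-4o", temperature=0)
transformer = LLMGraphTransformer(llm=llm)

# The config (here only callbacks) is forwarded to the transformer's internal
# chain, so each per-document LLM call is recorded in the Langfuse trace.
config = {"callbacks": [CallbackHandler()]}
docs = [Document(page_content="Marie Curie won two Nobel Prizes.")]
graph_documents = transformer.convert_to_graph_documents(docs, config=config)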
@@ -13,6 +13,7 @@ from langchain_core.prompts import (
     PromptTemplate,
 )
 from langchain_core.pydantic_v1 import BaseModel, Field, create_model
+from langchain_core.runnables import RunnableConfig
 
 examples = [
     {
@@ -710,13 +711,15 @@ class LLMGraphTransformer:
         prompt = prompt or default_prompt
         self.chain = prompt | structured_llm
 
-    def process_response(self, document: Document) -> GraphDocument:
+    def process_response(
+        self, document: Document, config: Optional[RunnableConfig] = None
+    ) -> GraphDocument:
         """
         Processes a single document, transforming it into a graph document using
         an LLM based on the model's schema and constraints.
         """
         text = document.page_content
-        raw_schema = self.chain.invoke({"input": text})
+        raw_schema = self.chain.invoke({"input": text}, config=config)
         if self._function_call:
             raw_schema = cast(Dict[Any, Any], raw_schema)
             nodes, relationships = _convert_to_graph_document(raw_schema)
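The forwarded value is a plain `RunnableConfig`, so whatever `Runnable.invoke` accepts (callbacks, tags, metadata, run_name) now reaches the internal `prompt | structured_llm` chain. A small stand-alone sketch of that pass-through, using a `RunnableLambda` as a stand-in for the real chain:

from langchain_core.runnables import RunnableConfig, RunnableLambda

# A stand-in for the transformer's internal `prompt | structured_llm` chain.
chain = RunnableLambda(lambda x: x["input"].upper())

# Callbacks, tags, metadata and run_name set here are visible to any tracer
# observing the run; this is the value the new `config` argument forwards.
config: RunnableConfig = {
    "run_name": "graph-extraction",
    "tags": ["llm-graph-transformer"],
    "metadata": {"doc_id": "example"},
}
print(chain.invoke({"input": "hello"}, config=config))  # -> HELLO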
@@ -765,7 +768,7 @@ class LLMGraphTransformer:
         return GraphDocument(nodes=nodes, relationships=relationships, source=document)
 
     def convert_to_graph_documents(
-        self, documents: Sequence[Document]
+        self, documents: Sequence[Document], config: Optional[RunnableConfig] = None
     ) -> List[GraphDocument]:
         """Convert a sequence of documents into graph documents.
 
@@ -776,15 +779,17 @@ class LLMGraphTransformer:
         Returns:
             Sequence[GraphDocument]: The transformed documents as graphs.
         """
-        return [self.process_response(document) for document in documents]
+        return [self.process_response(document, config) for document in documents]
 
-    async def aprocess_response(self, document: Document) -> GraphDocument:
+    async def aprocess_response(
+        self, document: Document, config: Optional[RunnableConfig] = None
+    ) -> GraphDocument:
         """
         Asynchronously processes a single document, transforming it into a
         graph document.
         """
         text = document.page_content
-        raw_schema = await self.chain.ainvoke({"input": text})
+        raw_schema = await self.chain.ainvoke({"input": text}, config=config)
         raw_schema = cast(Dict[Any, Any], raw_schema)
         nodes, relationships = _convert_to_graph_document(raw_schema)
 
@@ -811,13 +816,13 @@ class LLMGraphTransformer:
         return GraphDocument(nodes=nodes, relationships=relationships, source=document)
 
     async def aconvert_to_graph_documents(
-        self, documents: Sequence[Document]
+        self, documents: Sequence[Document], config: Optional[RunnableConfig] = None
     ) -> List[GraphDocument]:
         """
         Asynchronously convert a sequence of documents into graph documents.
         """
         tasks = [
-            asyncio.create_task(self.aprocess_response(document))
+            asyncio.create_task(self.aprocess_response(document, config))
             for document in documents
         ]
         results = await asyncio.gather(*tasks)
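On the async side, the same config is handed to every `aprocess_response` task before the results are gathered, so concurrent per-document runs share one set of callbacks and tags. A sketch of the async call, again with an assumed OpenAI model and sample document:

import asyncio

from langchain_core.documents import Document
from langchain_experimental.graph_transformers import LLMGraphTransformer
from langchain_openai import ChatOpenAI


async def main() -> None:
    transformer = LLMGraphTransformer(llm=ChatOpenAI(model="gpt-4o", temperature=0))
    docs = [Document(page_content="Ada Lovelace worked with Charles Babbage.")]
    # The single config is shared by every per-document task that
    # aconvert_to_graph_documents creates and gathers.
    graph_documents = await transformer.aconvert_to_graph_documents(
        docs, config={"tags": ["async-graph-extraction"]}
    )
    print(graph_documents[0].nodes)


asyncio.run(main())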