fix(ChatKnowledge): summary support zhipu message_converter (#1085)

Aries-ckt 2024-01-18 15:09:09 +08:00 committed by GitHub
parent 0936856c3a
commit 0162dee474
3 changed files with 11 additions and 10 deletions


@@ -400,7 +400,9 @@ class KnowledgeService:
         assembler = SummaryAssembler(
             knowledge=knowledge,
             model_name=request.model_name,
-            llm_client=DefaultLLMClient(worker_manager=worker_manager),
+            llm_client=DefaultLLMClient(
+                worker_manager=worker_manager, auto_convert_message=True
+            ),
             language=CFG.LANGUAGE,
             chunk_parameters=chunk_parameters,
         )
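
The hunk above turns on automatic message conversion for the client used by the summary service. A minimal sketch of issuing a request through such a client follows; the DefaultLLMClient import path, the pre-built worker_manager, and the "zhipu_proxyllm" model name are assumptions for illustration, not taken from this commit.

    from dbgpt.core import ModelMessage, ModelMessageRoleType, ModelRequest

    # Assumed import path; it may differ between DB-GPT versions.
    from dbgpt.model.cluster.client import DefaultLLMClient


    async def summarize(worker_manager, text: str) -> str:
        # auto_convert_message=True lets the client adapt the abstract message list
        # to the layout the target (proxy) model expects, e.g. Zhipu.
        llm_client = DefaultLLMClient(
            worker_manager=worker_manager, auto_convert_message=True
        )
        messages = [ModelMessage(role=ModelMessageRoleType.HUMAN, content=text)]
        # Model name is an assumption; use whatever proxy model is configured.
        request = ModelRequest(model="zhipu_proxyllm", messages=messages)
        response = await llm_client.generate(request)
        return response.text  # ModelOutput.text assumed to hold the completion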


@@ -18,7 +18,7 @@ Write a quick summary of the following context:
 the summary should be as concise as possible and not overly lengthy.Please keep the answer within approximately 200 characters.
 """
-REFINE_SUMMARY_TEMPLATE_ZH = """我们已经提供了一个到某一点的现有总结:{context}\n 请根据你之前推理的内容进行最终的总结,总结回答的时候最好按照1.2.3.进行. 注意:请用<中文>来进行总结。"""
+REFINE_SUMMARY_TEMPLATE_ZH = """我们已经提供了一个到某一点的现有总结:{context}\n 请根据你之前推理的内容进行总结,总结回答的时候最好按照1.2.3.进行. 注意:请用<中文>来进行总结。"""
 REFINE_SUMMARY_TEMPLATE_EN = """
 We have provided an existing summary up to a certain point: {context}, We have the opportunity to refine the existing summary (only if needed) with some more context below.
@@ -144,7 +144,7 @@ class SummaryExtractor(Extractor):
         from dbgpt.core import ModelMessage
 
             prompt = prompt_template.format(context=chunk_text)
-            messages = [ModelMessage(role=ModelMessageRoleType.SYSTEM, content=prompt)]
+            messages = [ModelMessage(role=ModelMessageRoleType.HUMAN, content=prompt)]
            request = ModelRequest(model=self._model_name, messages=messages)
            tasks.append(self._llm_client.generate(request))
        summary_results = await run_async_tasks(
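
Sending the summary prompt as a HUMAN message instead of a SYSTEM message, presumably because some proxy providers (Zhipu among them) reject a request that contains only a system message, keeps the per-chunk requests valid; together with auto_convert_message above, the client can then reshape messages as needed. A rough sketch of the fan-out around the changed line, with asyncio.gather standing in for DB-GPT's run_async_tasks helper and all names illustrative:

    import asyncio

    from dbgpt.core import ModelMessage, ModelMessageRoleType, ModelRequest


    async def summarize_chunks(llm_client, model_name: str, prompt_template: str, chunk_texts):
        # Build one request per chunk, sending the prompt as a HUMAN message.
        tasks = []
        for chunk_text in chunk_texts:
            prompt = prompt_template.format(context=chunk_text)
            messages = [ModelMessage(role=ModelMessageRoleType.HUMAN, content=prompt)]
            request = ModelRequest(model=model_name, messages=messages)
            tasks.append(llm_client.generate(request))
        # The extractor itself gathers these with run_async_tasks; asyncio.gather is a stand-in here.
        return await asyncio.gather(*tasks)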


@@ -21,11 +21,9 @@ This example shows how to use AWEL to build a simple rag summary example.
     .. code-block:: shell
 
         DBGPT_SERVER="http://127.0.0.1:5000"
-        FILE_PATH="{your_file_path}"
         curl -X POST http://127.0.0.1:5555/api/v1/awel/trigger/examples/rag/summary \
         -H "Content-Type: application/json" -d '{
-            "file_path": $FILE_PATH
+            "url": "https://docs.dbgpt.site/docs/awel"
         }'
 """
 from typing import Dict
@@ -33,12 +31,13 @@ from typing import Dict
 from dbgpt._private.pydantic import BaseModel, Field
 from dbgpt.core.awel import DAG, HttpTrigger, MapOperator
 from dbgpt.model import OpenAILLMClient
+from dbgpt.rag.knowledge.base import KnowledgeType
 from dbgpt.rag.operator.knowledge import KnowledgeOperator
 from dbgpt.rag.operator.summary import SummaryAssemblerOperator
 
 
 class TriggerReqBody(BaseModel):
-    file_path: str = Field(..., description="file_path")
+    url: str = Field(..., description="url")
 
 
 class RequestHandleOperator(MapOperator[TriggerReqBody, Dict]):
@@ -47,7 +46,7 @@ class RequestHandleOperator(MapOperator[TriggerReqBody, Dict]):
     async def map(self, input_value: TriggerReqBody) -> Dict:
         params = {
-            "file_path": input_value.file_path,
+            "url": input_value.url,
         }
         print(f"Receive input value: {input_value}")
         return params
@@ -58,9 +57,9 @@ with DAG("dbgpt_awel_simple_rag_summary_example") as dag:
         "/examples/rag/summary", methods="POST", request_body=TriggerReqBody
     )
     request_handle_task = RequestHandleOperator()
-    path_operator = MapOperator(lambda request: request["file_path"])
+    path_operator = MapOperator(lambda request: request["url"])
     # build knowledge operator
-    knowledge_operator = KnowledgeOperator()
+    knowledge_operator = KnowledgeOperator(knowledge_type=KnowledgeType.URL)
     # build summary assembler operator
     summary_operator = SummaryAssemblerOperator(
         llm_client=OpenAILLMClient(), language="en"
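
With the example switched from a local file path to a URL knowledge source, the trigger can be exercised roughly as follows. The sketch assumes the AWEL HTTP server is listening on 127.0.0.1:5555 as in the docstring above, and uses the requests library purely for illustration.

    import requests

    # Post the new `url` field (the old `file_path` field is gone) to the trigger endpoint.
    resp = requests.post(
        "http://127.0.0.1:5555/api/v1/awel/trigger/examples/rag/summary",
        json={"url": "https://docs.dbgpt.site/docs/awel"},
        timeout=120,
    )
    print(resp.text)  # summary produced by SummaryAssemblerOperator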