From 8b511a3a784657fb4a5a34670986242c4046aa4f Mon Sep 17 00:00:00 2001 From: Ahmed Tammaa Date: Sun, 23 Feb 2025 22:00:32 +0200 Subject: [PATCH] [Exception Handling] DeepSeek JSONDecodeError (#29758) For context, please check #29626. DeepSeek uses langchain_openai, and the problem is that failures surface as a bare `json decode error`. I added a handler for this to give a more sensible error message, stating that the DeepSeek API returned empty/invalid JSON. Reproducing the issue is a bit challenging as it is inconsistent: sometimes DeepSeek returns valid data, and at other times it returns invalid data, which triggers the JSON Decode Error. This PR adds exception handling, but is not an ultimate fix for the issue. --------- Co-authored-by: Chester Curme --- .../langchain_deepseek/chat_models.py | 46 ++++++++++++++++++- 1 file changed, 44 insertions(+), 2 deletions(-) diff --git a/libs/partners/deepseek/langchain_deepseek/chat_models.py b/libs/partners/deepseek/langchain_deepseek/chat_models.py index f3bcc2d1583..2398b1f9881 100644 --- a/libs/partners/deepseek/langchain_deepseek/chat_models.py +++ b/libs/partners/deepseek/langchain_deepseek/chat_models.py @@ -1,9 +1,13 @@ """DeepSeek chat models.""" -from typing import Dict, Optional, Type, Union +from json import JSONDecodeError +from typing import Any, Dict, Iterator, List, Optional, Type, Union import openai -from langchain_core.messages import AIMessageChunk +from langchain_core.callbacks import ( + CallbackManagerForLLMRun, +) +from langchain_core.messages import AIMessageChunk, BaseMessage from langchain_core.outputs import ChatGenerationChunk, ChatResult from langchain_core.utils import from_env, secret_from_env from langchain_openai.chat_models.base import BaseChatOpenAI @@ -239,3 +243,41 @@ class ChatDeepSeek(BaseChatOpenAI): reasoning_content ) return generation_chunk + + def _stream( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, 
+ ) -> Iterator[ChatGenerationChunk]: + try: + yield from super()._stream( + messages, stop=stop, run_manager=run_manager, **kwargs + ) + except JSONDecodeError as e: + raise JSONDecodeError( + "DeepSeek API returned an invalid response. " + "Please check the API status and try again.", + e.doc, + e.pos, + ) from e + + def _generate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + try: + return super()._generate( + messages, stop=stop, run_manager=run_manager, **kwargs + ) + except JSONDecodeError as e: + raise JSONDecodeError( + "DeepSeek API returned an invalid response. " + "Please check the API status and try again.", + e.doc, + e.pos, + ) from e