Mirror of https://github.com/csunny/DB-GPT.git, synced 2025-08-08 03:44:14 +00:00

Resolve warnings of logger library (#2681)

Signed-off-by: Emmanuel Ferdman <emmanuelferdman@gmail.com>

parent 6a82d9122b
commit d300a4d412
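
Background (not part of the diff): in Python's standard logging module, Logger.warn is a deprecated alias for Logger.warning; CPython emits a DeprecationWarning from warn() before delegating to warning(), which is the warning this commit resolves. A minimal sketch of the behavior, assuming an illustrative logger named "demo":

    import logging
    import warnings

    warnings.simplefilter("always")      # surface DeprecationWarning on every call
    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("demo")   # illustrative name, not from the codebase

    logger.warning("supported spelling")  # the documented API
    logger.warn("deprecated alias")       # emits DeprecationWarning, then logs anyway

The rest of the commit is a mechanical rename of logger.warn(...) to logger.warning(...) across the affected modules.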
@@ -58,7 +58,7 @@ class KnowledgeApiClient(ApiClient):
             return self._post("/knowledge/space/add", data=request)
         except Exception as e:
             if "have already named" in str(e):
-                logger.warn(f"you have already named {request.name}")
+                logger.warning(f"you have already named {request.name}")
             else:
                 raise e

@@ -143,7 +143,7 @@ def knowledge_init(
             )
         except Exception as ex:
             if overwrite and "have already named" in str(ex):
-                logger.warn(
+                logger.warning(
                     f"Document {filename} already exist in space {space.name}, "
                     "overwrite it"
                 )

@@ -172,7 +172,7 @@ def knowledge_init(
             return doc_id
         except Exception as e:
             if skip_wrong_doc:
-                logger.warn(f"Upload {filename} to {space.name} failed: {str(e)}")
+                logger.warning(f"Upload {filename} to {space.name} failed: {str(e)}")
             else:
                 raise e

@@ -195,7 +195,7 @@ def knowledge_init(
         doc_ids = [r.result() for r in as_completed(tasks)]
         doc_ids = list(filter(lambda x: x, doc_ids))
         if not doc_ids:
-            logger.warn("Warning: no document to sync")
+            logger.warning("Warning: no document to sync")
             return

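An aside on this hunk (an observation, not a change the commit makes): the record is already emitted at WARNING level, so the literal "Warning:" prefix inside the message duplicates what a levelname-aware formatter prints, e.g.:

    import logging

    logging.basicConfig(format="%(levelname)s %(name)s: %(message)s")
    logging.getLogger("demo").warning("no document to sync")
    # prints: WARNING demo: no document to sync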
@@ -374,7 +374,7 @@ def knowledge_delete(
             .lower()
         )
         if user_input != "yes":
-            logger.warn("Delete operation cancelled.")
+            logger.warning("Delete operation cancelled.")
             return
         client.space_delete(space)
         logger.info("Delete the whole knowledge space successfully!")

@@ -390,7 +390,7 @@ def knowledge_delete(
             .lower()
         )
         if user_input != "yes":
-            logger.warn("Delete operation cancelled.")
+            logger.warning("Delete operation cancelled.")
             return
         client.document_delete(space_name, KnowledgeDocumentRequest(doc_name=doc_name))
         logger.info(

@@ -610,7 +610,7 @@ class BaseChat(ABC):
         return prompt_define_response

     def _blocking_stream_call(self):
-        logger.warn(
+        logger.warning(
             "_blocking_stream_call is only temporarily used in webserver and will be "
             "deleted soon, please use stream_call to replace it for higher performance"
         )

@@ -68,7 +68,7 @@ def create_directory_if_not_exists(directory_path: str) -> bool:
             logger.debug(f"Created directory: {directory_path}")
             return True
         except OSError as e:
-            logger.warn(f"Error creating directory {directory_path}: {e}")
+            logger.warning(f"Error creating directory {directory_path}: {e}")
             return False
     else:
         logger.info(f"Directory {directory_path} already exists")

@@ -1065,7 +1065,7 @@ def _get_graph(dag: DAG):
     try:
         from graphviz import Digraph
     except ImportError:
-        logger.warn("Can't import graphviz, skip visualize DAG")
+        logger.warning("Can't import graphviz, skip visualize DAG")
         return None, None
     dot = Digraph(name=dag.dag_id)
     mermaid_str = "graph TD;\n"  # Initialize Mermaid graph definition

@@ -25,13 +25,13 @@ def _hf_check_quantization(model_params: HFLLMDeployModelParameters):
     has_quantization = model_params.quantization is not None
     if has_quantization:
         if model_params.real_device != "cuda":
-            logger.warn(
+            logger.warning(
                 "8-bit quantization and 4-bit quantization just supported by cuda"
             )
             return False
         elif "chatglm" in model_name:
             if "int4" not in model_name:
-                logger.warn(
+                logger.warning(
                     "chatglm or chatglm2 not support quantization now, see: "
                     "https://github.com/huggingface/transformers/issues/25228"
                 )

@@ -389,7 +389,7 @@ def load_huggingface_quantization_model(
             tokenizer.bos_token_id = 1
             tokenizer.pad_token_id = 0
         except Exception as e:
-            logger.warn(f"{str(e)}")
+            logger.warning(f"{str(e)}")
     else:
         logger.info(
             "Current model type is not LlamaForCausalLM, load tokenizer by "

@@ -80,7 +80,7 @@ async def _async_heartbeat_sender(
         try:
             await send_heartbeat_func(worker_run_data)
         except Exception as e:
-            logger.warn(f"Send heartbeat func error: {str(e)}")
+            logger.warning(f"Send heartbeat func error: {str(e)}")
         finally:
             await asyncio.sleep(heartbeat_interval)

@@ -145,7 +145,7 @@ class TiktokenProxyTokenizer(ProxyTokenizer):
             )
         except ImportError:
             self._support_encoding = False
-            logger.warn("tiktoken not installed, cannot count tokens")
+            logger.warning("tiktoken not installed, cannot count tokens")
             return None
         try:
             if not model_name:

@@ -71,7 +71,7 @@ class ProxyTokenizerWrapper:
             )
         except ImportError:
             self._support_encoding = False
-            logger.warn("tiktoken not installed, cannot count tokens, returning -1")
+            logger.warning("tiktoken not installed, cannot count tokens, returning -1")
             return -1
         try:
             if not model_name:

@@ -167,7 +167,7 @@ def initialize_cache(
                 persist_dir, mem_table_buffer_mb=max_memory_mb
             )
         except ImportError as e:
-            logger.warn(
+            logger.warning(
                 f"Can't import DiskCacheStorage, use MemoryCacheStorage, import error "
                 f"message: {str(e)}"
             )

@@ -12,7 +12,7 @@ def _clear_model_cache(device="cuda"):

         _clear_torch_cache(device)
     except ImportError:
-        logger.warn("Torch not installed, skip clear torch cache")
+        logger.warning("Torch not installed, skip clear torch cache")
     # TODO clear other cache

@@ -30,7 +30,7 @@ def _clear_torch_cache(device="cuda"):

             empty_cache()
         except Exception as e:
-            logger.warn(f"Clear mps torch cache error, {str(e)}")
+            logger.warning(f"Clear mps torch cache error, {str(e)}")
     elif (hasattr(backends, "cuda") and backends.cuda.is_built()) or torch.has_cuda:
         device_count = torch.cuda.device_count()
         for device_id in range(device_count):

@@ -86,6 +86,6 @@ class ElevenLabsSpeech(VoiceBase):
             os.remove("speech.mpeg")
             return True
         else:
-            logger.warn("Request failed with status code:", response.status_code)
+            logger.warning("Request failed with status code:", response.status_code)
             logger.info("Response content:", response.content)
             return False

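A note on this hunk (an observation about the surrounding code, not something the commit changes): both calls pass response.status_code and response.content as extra positional arguments, print-style. The stdlib logging API instead treats positional arguments as %-format values for the message; since these messages contain no format specifier, rendering the record fails with "not all arguments converted during string formatting", which logging reports as a logging error on stderr. A sketch of the %-style form logging expects:

    # Lazy %-formatting: the argument is interpolated only if the record is emitted.
    logger.warning("Request failed with status code: %s", response.status_code)
    logger.info("Response content: %s", response.content)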
@@ -88,7 +88,7 @@ class DBSummaryClient:
                 self.db_summary_embedding(item["db_name"], item["db_type"])
             except Exception as e:
                 message = traceback.format_exc()
-                logger.warn(
+                logger.warning(
                     f"{item['db_name']}, {item['db_type']} summary error!{str(e)}, "
                     f"detail: {message}"
                 )
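
To keep deprecated warn() calls from creeping back in, one option (a suggestion, not part of this commit) is to escalate DeprecationWarning to an error in the test environment:

    import warnings

    # e.g. in a pytest conftest.py: any remaining logger.warn() call fails loudly
    warnings.simplefilter("error", DeprecationWarning)

The same effect is available from the command line via python -W error::DeprecationWarning.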