mirror of https://github.com/hwchase17/langchain.git
synced 2025-06-22 23:00:00 +00:00
parent 5935767056
commit feec422bf7
@@ -2,6 +2,8 @@ import logging
 from string import Template
 from typing import Any, Dict
 
+logger = logging.getLogger(__name__)
+
 rel_query = Template(
     """
 MATCH ()-[e:`$edge_type`]->()
@@ -97,7 +99,7 @@ class NebulaGraph:
         try:
             self.session_pool.close()
         except Exception as e:
-            logging.warning(f"Could not close session pool. Error: {e}")
+            logger.warning(f"Could not close session pool. Error: {e}")
 
     @property
     def get_schema(self) -> str:
@@ -112,7 +114,7 @@ class NebulaGraph:
         try:
             result = self.session_pool.execute_parameter(query, params)
             if not result.is_succeeded():
-                logging.warning(
+                logger.warning(
                     f"Error executing query to NebulaGraph. "
                     f"Error: {result.error_msg()}\n"
                     f"Query: {query} \n"
@@ -120,7 +122,7 @@ class NebulaGraph:
             return result
 
         except NoValidSessionException:
-            logging.warning(
+            logger.warning(
                 f"No valid session found in session pool. "
                 f"Please consider increasing the session pool size. "
                 f"Current size: {self.session_pool_size}"
@@ -134,7 +136,7 @@ class NebulaGraph:
         except RuntimeError as e:
             if retry < RETRY_TIMES:
                 retry += 1
-                logging.warning(
+                logger.warning(
                     f"Error executing query to NebulaGraph. "
                     f"Retrying ({retry}/{RETRY_TIMES})...\n"
                     f"query: {query} \n"
@@ -148,7 +150,7 @@ class NebulaGraph:
             # connection issue, try to recreate session pool
             if retry < RETRY_TIMES:
                 retry += 1
-                logging.warning(
+                logger.warning(
                     f"Connection issue with NebulaGraph. "
                     f"Retrying ({retry}/{RETRY_TIMES})...\n to recreate session pool"
                 )
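Every hunk in the file above makes the same change: the module gains one module-level logger, `logger = logging.getLogger(__name__)`, and the bare `logging.warning(...)` calls are routed through it. A minimal sketch of the difference follows, using a hypothetical `close_pool` helper that is not part of the diff; bare `logging.warning` always emits through the root logger, while a named logger can be configured or silenced per module by downstream code.

```python
import logging

# Named, module-level logger: records are tagged with this module's dotted
# path, so applications embedding the library can filter them by name.
logger = logging.getLogger(__name__)


def close_pool(pool) -> None:
    """Illustrative stand-in for the session-pool cleanup in the hunk at line 97."""
    try:
        pool.close()
    except Exception as e:
        # logging.warning(...) would go through the root logger and ignore any
        # per-module configuration; logger.warning(...) honours this module's
        # level, handlers, and propagation settings.
        logger.warning(f"Could not close session pool. Error: {e}")
```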
@@ -12,7 +12,7 @@ from aiohttp import ServerTimeoutError
 from pydantic_v1 import BaseModel, Field, root_validator, validator
 from requests.exceptions import Timeout
 
-_LOGGER = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)
 
 BASE_URL = os.getenv("POWERBI_BASE_URL", "https://api.powerbi.com/v1.0/myorg")
 
@@ -120,7 +120,7 @@ class PowerBIDataset(BaseModel):
                 table for table in fixed_tables if table not in self.table_names
             ]
             if non_existing_tables:
-                _LOGGER.warning(
+                logger.warning(
                     "Table(s) %s not found in dataset.",
                     ", ".join(non_existing_tables),
                 )
@@ -130,7 +130,7 @@ class PowerBIDataset(BaseModel):
             return tables if tables else None
         if isinstance(table_names, str) and table_names != "":
             if table_names not in self.table_names:
-                _LOGGER.warning("Table %s not found in dataset.", table_names)
+                logger.warning("Table %s not found in dataset.", table_names)
                 return None
             return [fix_table_name(table_names)]
         return self.table_names
@@ -177,10 +177,10 @@ class PowerBIDataset(BaseModel):
             )
             self.schemas[table] = json_to_md(result["results"][0]["tables"][0]["rows"])
         except Timeout:
-            _LOGGER.warning("Timeout while getting table info for %s", table)
+            logger.warning("Timeout while getting table info for %s", table)
             self.schemas[table] = "unknown"
         except Exception as exc:  # pylint: disable=broad-exception-caught
-            _LOGGER.warning("Error while getting table info for %s: %s", table, exc)
+            logger.warning("Error while getting table info for %s: %s", table, exc)
             self.schemas[table] = "unknown"
 
     async def _aget_schema(self, table: str) -> None:
@@ -191,10 +191,10 @@ class PowerBIDataset(BaseModel):
             )
             self.schemas[table] = json_to_md(result["results"][0]["tables"][0]["rows"])
         except ServerTimeoutError:
-            _LOGGER.warning("Timeout while getting table info for %s", table)
+            logger.warning("Timeout while getting table info for %s", table)
             self.schemas[table] = "unknown"
         except Exception as exc:  # pylint: disable=broad-exception-caught
-            _LOGGER.warning("Error while getting table info for %s: %s", table, exc)
+            logger.warning("Error while getting table info for %s: %s", table, exc)
             self.schemas[table] = "unknown"
 
     def _create_json_content(self, command: str) -> dict[str, Any]:
@@ -207,7 +207,7 @@ class PowerBIDataset(BaseModel):
 
     def run(self, command: str) -> Any:
         """Execute a DAX command and return a json representing the results."""
-        _LOGGER.debug("Running command: %s", command)
+        logger.debug("Running command: %s", command)
         response = requests.post(
             self.request_url,
             json=self._create_json_content(command),
@@ -222,7 +222,7 @@ class PowerBIDataset(BaseModel):
 
     async def arun(self, command: str) -> Any:
         """Execute a DAX command and return the result asynchronously."""
-        _LOGGER.debug("Running command: %s", command)
+        logger.debug("Running command: %s", command)
         if self.aiosession:
             async with self.aiosession.post(
                 self.request_url,
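The PowerBI hunks are the companion rename, `_LOGGER` to `logger`, so both modules now expose the conventional lowercase name and emit through `logging.getLogger(__name__)`. An embedding application can therefore tune these messages by logger name rather than via the root logger. A small sketch, assuming only that the modules live under the `langchain` package (exact module paths are not shown in the diff):

```python
import logging

# Send warnings and above to stderr by default.
logging.basicConfig(level=logging.WARNING)

# Opt in to the "Running command: %s" debug lines from the PowerBI utility
# (and any other module in the package) via the logger hierarchy:
# "langchain.<module>" loggers inherit from the "langchain" logger.
logging.getLogger("langchain").setLevel(logging.DEBUG)

# Or silence the library's warnings without touching application loggers:
# logging.getLogger("langchain").setLevel(logging.CRITICAL)
```

Note that the `%s`-style arguments kept in these hunks (for example `logger.warning("Table %s not found in dataset.", table_names)`) are only interpolated when the record passes the level check, which is the usual reason to prefer them over f-strings inside log calls.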