Mirror of https://github.com/csunny/DB-GPT.git
refactor: The first refactored version for sdk release (#907)
Co-authored-by: chengfangyin2 <chengfangyin3@jd.com>
0     dbgpt/datasource/rdbms/dialect/__init__.py    Normal file
14    dbgpt/datasource/rdbms/dialect/starrocks/__init__.py    Normal file
@@ -0,0 +1,14 @@
#! /usr/bin/python3
# Copyright 2021-present StarRocks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -0,0 +1,22 @@
#! /usr/bin/python3
# Copyright 2021-present StarRocks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sqlalchemy.dialects import registry

registry.register(
    "starrocks",
    "dbgpt.datasource.rdbms.dialect.starrocks.sqlalchemy.dialect",
    "StarRocksDialect",
)
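Not part of this commit, but a minimal usage sketch of what the registration above enables: once the module containing the registry.register() call has been imported, SQLAlchemy can resolve "starrocks://" URLs to StarRocksDialect. The import path, host, port, and credentials below are illustrative assumptions; pymysql and a reachable StarRocks frontend are required.

from sqlalchemy import create_engine, text

# Hypothetical import path; importing it is assumed to run the
# registry.register() call shown above.
import dbgpt.datasource.rdbms.dialect.starrocks.sqlalchemy  # noqa: F401

# Placeholder credentials and address; 9030 is StarRocks' default MySQL-protocol port.
engine = create_engine("starrocks://root:password@127.0.0.1:9030/demo_db")
with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())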
104   dbgpt/datasource/rdbms/dialect/starrocks/sqlalchemy/datatype.py    Normal file
@@ -0,0 +1,104 @@
import logging
import re
from typing import Optional, List, Any, Type, Dict

from sqlalchemy import Numeric, Integer, Float
from sqlalchemy.sql import sqltypes
from sqlalchemy.sql.type_api import TypeEngine

logger = logging.getLogger(__name__)


class TINYINT(Integer):  # pylint: disable=no-init
    __visit_name__ = "TINYINT"


class LARGEINT(Integer):  # pylint: disable=no-init
    __visit_name__ = "LARGEINT"


class DOUBLE(Float):  # pylint: disable=no-init
    __visit_name__ = "DOUBLE"


class HLL(Numeric):  # pylint: disable=no-init
    __visit_name__ = "HLL"


class BITMAP(Numeric):  # pylint: disable=no-init
    __visit_name__ = "BITMAP"


class PERCENTILE(Numeric):  # pylint: disable=no-init
    __visit_name__ = "PERCENTILE"


class ARRAY(TypeEngine):  # pylint: disable=no-init
    __visit_name__ = "ARRAY"

    @property
    def python_type(self) -> Optional[Type[List[Any]]]:
        return list


class MAP(TypeEngine):  # pylint: disable=no-init
    __visit_name__ = "MAP"

    @property
    def python_type(self) -> Optional[Type[Dict[Any, Any]]]:
        return dict


class STRUCT(TypeEngine):  # pylint: disable=no-init
    __visit_name__ = "STRUCT"

    @property
    def python_type(self) -> Optional[Type[Any]]:
        return None


_type_map = {
    # === Boolean ===
    "boolean": sqltypes.BOOLEAN,
    # === Integer ===
    "tinyint": sqltypes.SMALLINT,
    "smallint": sqltypes.SMALLINT,
    "int": sqltypes.INTEGER,
    "bigint": sqltypes.BIGINT,
    "largeint": LARGEINT,
    # === Floating-point ===
    "float": sqltypes.FLOAT,
    "double": DOUBLE,
    # === Fixed-precision ===
    "decimal": sqltypes.DECIMAL,
    # === String ===
    "varchar": sqltypes.VARCHAR,
    "char": sqltypes.CHAR,
    "json": sqltypes.JSON,
    # === Date and time ===
    "date": sqltypes.DATE,
    "datetime": sqltypes.DATETIME,
    "timestamp": sqltypes.DATETIME,
    # === Structural ===
    "array": ARRAY,
    "map": MAP,
    "struct": STRUCT,
    "hll": HLL,
    "percentile": PERCENTILE,
    "bitmap": BITMAP,
}


def parse_sqltype(type_str: str) -> TypeEngine:
    type_str = type_str.strip().lower()
    match = re.match(r"^(?P<type>\w+)\s*(?:\((?P<options>.*)\))?", type_str)
    if not match:
        logger.warning(f"Could not parse type name '{type_str}'")
        return sqltypes.NULLTYPE
    type_name = match.group("type")

    if type_name not in _type_map:
        logger.warning(f"Did not recognize type '{type_name}'")
        return sqltypes.NULLTYPE
    type_class = _type_map[type_name]
    return type_class()
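Not part of this commit: a small sketch showing how parse_sqltype() above maps StarRocks column type strings to SQLAlchemy types. Note that the regex captures an options group (e.g. precision/scale) but the current implementation discards it, and unknown types fall back to NULLTYPE.

from sqlalchemy.sql import sqltypes

from dbgpt.datasource.rdbms.dialect.starrocks.sqlalchemy.datatype import parse_sqltype

print(type(parse_sqltype("DECIMAL(27, 9)")))  # sqltypes.DECIMAL; precision/scale dropped
print(type(parse_sqltype("largeint")))        # LARGEINT
print(type(parse_sqltype("array<int>")))      # ARRAY (element type not preserved)
print(parse_sqltype("unknown_type") is sqltypes.NULLTYPE)  # True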
173   dbgpt/datasource/rdbms/dialect/starrocks/sqlalchemy/dialect.py    Normal file
@@ -0,0 +1,173 @@
#! /usr/bin/python3
# Copyright 2021-present StarRocks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Dict, List

from sqlalchemy import log, exc, text
from sqlalchemy.dialects.mysql.pymysql import MySQLDialect_pymysql
from sqlalchemy.engine import Connection

from dbgpt.datasource.rdbms.dialect.starrocks.sqlalchemy import datatype

logger = logging.getLogger(__name__)


@log.class_logger
class StarRocksDialect(MySQLDialect_pymysql):
    # Caching
    # Warnings are generated by SQLAlchemy if this flag is not explicitly set
    # and tests are needed before being enabled
    supports_statement_cache = False

    name = "starrocks"

    def __init__(self, *args, **kw):
        super(StarRocksDialect, self).__init__(*args, **kw)

    def has_table(self, connection, table_name, schema=None, **kw):
        self._ensure_has_table_connection(connection)

        if schema is None:
            schema = self.default_schema_name

        assert schema is not None

        quote = self.identifier_preparer.quote_identifier
        full_name = quote(table_name)
        if schema:
            full_name = "{}.{}".format(quote(schema), full_name)

        res = connection.execute(text(f"DESCRIBE {full_name}"))
        return res.first() is not None

    def get_schema_names(self, connection, **kw):
        rp = connection.exec_driver_sql("SHOW schemas")
        return [r[0] for r in rp]

    def get_table_names(self, connection, schema=None, **kw):
        """Return a Unicode SHOW TABLES from a given schema."""
        if schema is not None:
            current_schema = schema
        else:
            current_schema = self.default_schema_name

        charset = self._connection_charset

        rp = connection.exec_driver_sql(
            "SHOW FULL TABLES FROM %s"
            % self.identifier_preparer.quote_identifier(current_schema)
        )

        return [
            row[0]
            for row in self._compat_fetchall(rp, charset=charset)
            if row[1] == "BASE TABLE"
        ]

    def get_view_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name
        charset = self._connection_charset
        rp = connection.exec_driver_sql(
            "SHOW FULL TABLES FROM %s"
            % self.identifier_preparer.quote_identifier(schema)
        )
        return [
            row[0]
            for row in self._compat_fetchall(rp, charset=charset)
            if row[1] in ("VIEW", "SYSTEM VIEW")
        ]

    def get_columns(
        self, connection: Connection, table_name: str, schema: str = None, **kw
    ) -> List[Dict[str, Any]]:
        if not self.has_table(connection, table_name, schema):
            raise exc.NoSuchTableError(f"schema={schema}, table={table_name}")
        schema = schema or self._get_default_schema_name(connection)

        quote = self.identifier_preparer.quote_identifier
        full_name = quote(table_name)
        if schema:
            full_name = "{}.{}".format(quote(schema), full_name)

        res = connection.execute(text(f"SHOW COLUMNS FROM {full_name}"))
        columns = []
        for record in res:
            column = dict(
                name=record.Field,
                type=datatype.parse_sqltype(record.Type),
                nullable=record.Null == "YES",
                default=record.Default,
            )
            columns.append(column)
        return columns

    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        return {  # type: ignore  # pep-655 not supported
            "name": None,
            "constrained_columns": [],
        }

    def get_unique_constraints(
        self, connection: Connection, table_name: str, schema: str = None, **kw
    ) -> List[Dict[str, Any]]:
        return []

    def get_check_constraints(
        self, connection: Connection, table_name: str, schema: str = None, **kw
    ) -> List[Dict[str, Any]]:
        return []

    def get_foreign_keys(
        self, connection: Connection, table_name: str, schema: str = None, **kw
    ) -> List[Dict[str, Any]]:
        return []

    def get_primary_keys(
        self, connection: Connection, table_name: str, schema: str = None, **kw
    ) -> List[str]:
        pk = self.get_pk_constraint(connection, table_name, schema)
        return pk.get("constrained_columns")  # type: ignore

    def get_indexes(self, connection, table_name, schema=None, **kw):
        return []

    def has_sequence(
        self, connection: Connection, sequence_name: str, schema: str = None, **kw
    ) -> bool:
        return False

    def get_sequence_names(
        self, connection: Connection, schema: str = None, **kw
    ) -> List[str]:
        return []

    def get_temp_view_names(
        self, connection: Connection, schema: str = None, **kw
    ) -> List[str]:
        return []

    def get_temp_table_names(
        self, connection: Connection, schema: str = None, **kw
    ) -> List[str]:
        return []

    def get_table_options(self, connection, table_name, schema=None, **kw):
        return {}

    def get_table_comment(
        self, connection: Connection, table_name: str, schema: str = None, **kw
    ) -> Dict[str, Any]:
        return dict(text=None)
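Not part of this commit: an introspection sketch against the dialect above, using SQLAlchemy's inspect() API. It assumes the dialect has been registered (see the registry.register() call earlier); the connection details and table name are placeholders.

from sqlalchemy import create_engine, inspect

engine = create_engine("starrocks://root:password@127.0.0.1:9030/demo_db")
inspector = inspect(engine)

print(inspector.get_schema_names())        # routed to StarRocksDialect.get_schema_names()
print(inspector.get_table_names())         # SHOW FULL TABLES ..., BASE TABLE rows only
for column in inspector.get_columns("orders"):  # "orders" is a placeholder table name
    print(column["name"], column["type"], column["nullable"], column["default"])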