Merge branch 'dev' into oracle-schema-export
@ -1,21 +1,21 @@

# Chat2DB AI SQL Usage Guide

Chat2DB ships a set of ChatGPT-based AI SQL features, chiefly natural-language-to-SQL, SQL explanation, SQL optimization, and SQL conversion. With them you can turn natural language into SQL that actually runs against your database rather than SQL-shaped pseudocode, have complex SQL explained in natural language so it is easier to understand, get comprehensive optimization suggestions for slow SQL to improve query performance, and convert SQL between database dialects to ease migration.

## Configuration

### Open Settings

### Open Settings [Old UI]

<a><img src="https://img.alicdn.com/imgextra/i2/O1CN01hecdtO1acLegtiP9k_!!6000000003350-2-tps-2400-1600.png" width="100%"/></a>

### Configure AI

#### Configure OPENAI

#### Configure OPENAI [Old UI]

Using OPENAI's ChatSql feature requires meeting two conditions:

- Configure OPENAI_API_KEY. If you do not have one, you can join the support group and follow the group announcement to obtain a Chat2DB custom key.

- The client's network must be able to reach the OpenAI site. If a local VPN does not take effect globally, set a network proxy HOST and PORT in the client to ensure connectivity; a minimal connectivity check is sketched after the screenshot below.

<a><img src="https://img.alicdn.com/imgextra/i2/O1CN01anrJMI1FEtSBbmTau_!!6000000000456-0-tps-1594-964.jpg" width="100%"/></a>
#### Configure a custom AI

#### Configure a custom AI [Old UI]

- The custom AI can be any model you deploy yourself, such as ChatGLM, ChatGPT, ERNIE Bot (Wenxin Yiyan), Tongyi Qianwen, and so on. However, the custom endpoint's input and output must follow the custom protocol specification to be usable out of the box; otherwise secondary development may be required. The code provides two DEMO endpoints: configure the custom AI endpoint address and whether the endpoint streams its output, and you can try them directly. In practice, use the DEMO endpoints as a reference when writing your own, or develop on top of them to wrap your own custom interface; a rough sketch of such an endpoint follows the two screenshots below.

- Configuration DEMO for a custom streaming endpoint

<a><img src="https://img.alicdn.com/imgextra/i1/O1CN01xMqnRH1DlkdSekvSF_!!6000000000257-0-tps-591-508.jpg" width="100%"/></a>

- Configuration DEMO for a custom non-streaming endpoint

<a><img src="https://img.alicdn.com/imgextra/i1/O1CN01JqmbGo1fW0GAQhRu4_!!6000000004013-0-tps-587-489.jpg" width="100%"/></a>
## Natural language to SQL

## Natural language to SQL [Old UI]

### Feature description

Enter natural language, select Execute, and pass in the table structure information relevant to the query; the corresponding SQL is returned.

- Sample input: query student Xiaoming's scores in each subject, passing in the related data tables course, score, student, student_course
@ -182,7 +182,7 @@ $ npm run build:web:prod / cp -r dist ../chat2db-server/chat2db-server-start/src

### Discord

<!-- [](your Discord invite link) -->

[](https://discord.gg/N6JscF7q)

[](https://discord.com/invite/Zn9dFQKSJa)

## LICENSE

@ -191,7 +191,7 @@ The primary license used by this software is the [Apache License 2.0](https://ww

## ❤️ Acknowledgements

Thanks to all the students who contributed to Chat2DB~

Thanks to all the students who contributed to Chat2DB~~

<a href="https://github.com/chat2db/Chat2DB/graphs/contributors">

<img src="https://contrib.rocks/image?repo=chat2db/Chat2DB" />

@ -201,7 +201,7 @@ $ cp -r dist/index.html ../chat2db-server/chat2db-server-start/src/main/resource

## ❤️ Acknowledgements

Thanks to all the students who contributed to Chat2DB~

Thanks to all the students who contributed to Chat2DB~~

<a href="https://github.com/chat2db/Chat2DB/graphs/contributors">

<img src="https://contrib.rocks/image?repo=chat2db/Chat2DB" />
@ -250,6 +250,10 @@ export const dataSourceFormConfigs: IConnectionConfig[] = [
      {
        "key": "zeroDateTimeBehavior",
        "value": "convertToNull"
      },
      {
        "key": "useInformationSchema",
        "value": "true"
      }
    ],
    type: DatabaseTypeCode.MYSQL,
@ -1,14 +1,83 @@
package ai.chat2db.plugin.clickhouse;

import java.sql.Connection;

import ai.chat2db.spi.DBManage;
import ai.chat2db.spi.jdbc.DefaultDBManage;
import ai.chat2db.spi.sql.ConnectInfo;
import ai.chat2db.spi.sql.SQLExecutor;
import org.apache.commons.lang3.StringUtils;

import java.sql.*;
import java.util.Objects;

public class ClickHouseDBManage extends DefaultDBManage implements DBManage {
    @Override
    public String exportDatabase(Connection connection, String databaseName, String schemaName, boolean containData) throws SQLException {
        StringBuilder sqlBuilder = new StringBuilder();
        exportTablesOrViewsOrDictionaries(connection, sqlBuilder, databaseName, containData);
        exportFunctions(connection, sqlBuilder);
        return sqlBuilder.toString();
    }

    // Export user-defined functions as DROP + CREATE statements.
    private void exportFunctions(Connection connection, StringBuilder sqlBuilder) throws SQLException {
        String sql = "SELECT name,create_query from system.functions where origin='SQLUserDefined'";
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            while (resultSet.next()) {
                sqlBuilder.append("DROP FUNCTION IF EXISTS ").append(resultSet.getString("name")).append(";")
                        .append("\n")
                        .append(resultSet.getString("create_query")).append(";").append("\n");
            }
        }
    }

    // Export tables, views and dictionaries (distinguished by engine) as DROP + DDL,
    // optionally followed by the table data.
    private void exportTablesOrViewsOrDictionaries(Connection connection, StringBuilder sqlBuilder, String databaseName, boolean containData) throws SQLException {
        String sql = String.format("SELECT create_table_query, has_own_data,engine,name from system.`tables` WHERE `database`='%s'", databaseName);
        try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(sql)) {
            while (resultSet.next()) {
                String ddl = resultSet.getString("create_table_query");
                boolean dataFlag = resultSet.getInt("has_own_data") == 1;
                String tableType = resultSet.getString("engine");
                String tableOrViewName = resultSet.getString("name");
                if (Objects.equals("View", tableType)) {
                    sqlBuilder.append("DROP VIEW IF EXISTS ").append(databaseName).append(".").append(tableOrViewName)
                            .append(";").append("\n").append(ddl).append(";").append("\n");
                } else if (Objects.equals("Dictionary", tableType)) {
                    sqlBuilder.append("DROP DICTIONARY IF EXISTS ").append(databaseName).append(".").append(tableOrViewName)
                            .append(";").append("\n").append(ddl).append(";").append("\n");
                } else {
                    sqlBuilder.append("DROP TABLE IF EXISTS ").append(databaseName).append(".").append(tableOrViewName)
                            .append(";").append("\n").append(ddl).append(";").append("\n");
                    if (containData && dataFlag) {
                        exportTableData(connection, tableOrViewName, sqlBuilder);
                    }
                }
            }
        }
    }

    // Dump every row of the table as an INSERT statement.
    private void exportTableData(Connection connection, String tableName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("select * from %s", tableName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            ResultSetMetaData metaData = resultSet.getMetaData();
            while (resultSet.next()) {
                sqlBuilder.append("INSERT INTO ").append(tableName).append(" VALUES (");
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    String value = resultSet.getString(i);
                    if (Objects.isNull(value)) {
                        sqlBuilder.append("NULL");
                    } else {
                        sqlBuilder.append("'").append(value).append("'");
                    }
                    if (i < metaData.getColumnCount()) {
                        sqlBuilder.append(", ");
                    }
                }
                sqlBuilder.append(");\n");
            }
            sqlBuilder.append("\n");
        }
    }

    @Override
    public Connection getConnection(ConnectInfo connectInfo) {
@ -36,16 +36,29 @@ public class ClickHouseMetaData extends DefaultMetaService implements MetaData {
|
||||
+ "TRIGGER_SCHEMA = '%s' AND TRIGGER_NAME = '%s';";
|
||||
private static String TRIGGER_SQL_LIST
|
||||
= "SELECT TRIGGER_NAME FROM INFORMATION_SCHEMA.TRIGGERS where TRIGGER_SCHEMA = '%s';";
|
||||
private static String SELECT_TABLE_COLUMNS = "SELECT * FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s' order by ORDINAL_POSITION";
|
||||
private static String SELECT_TABLE_COLUMNS = "select * from `system`.columns where table ='%s' and database='%s';";
|
||||
private static String VIEW_SQL
|
||||
= "SELECT TABLE_SCHEMA AS DatabaseName, TABLE_NAME AS ViewName, VIEW_DEFINITION AS definition, CHECK_OPTION, "
|
||||
+ "IS_UPDATABLE FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s';";
|
||||
= "SELECT create_table_query from system.`tables` WHERE `database`='%s' and name='%s'";
|
||||
private List<String> systemDatabases = Arrays.asList("information_schema", "system");
|
||||
public static final String FUNCTION_SQL = "SELECT name,create_query as ddl from system.functions where origin='SQLUserDefined'";
|
||||
|
||||
public static String format(String tableName) {
|
||||
return "`" + tableName + "`";
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Function> functions(Connection connection, String databaseName, String schemaName) {
|
||||
return SQLExecutor.getInstance().execute(connection, FUNCTION_SQL, resultSet -> {
|
||||
List<Function> functions = new ArrayList<>();
|
||||
while (resultSet.next()) {
|
||||
Function function = new Function();
|
||||
function.setFunctionName(resultSet.getString("name"));
|
||||
functions.add(function);
|
||||
}
|
||||
return functions;
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Database> databases(Connection connection) {
|
||||
List<Database> list = SQLExecutor.getInstance().execute(connection, "SELECT name FROM system.databases;", resultSet -> {
|
||||
@ -81,17 +94,15 @@ public class ClickHouseMetaData extends DefaultMetaService implements MetaData {
|
||||
@Override
|
||||
public Function function(Connection connection, @NotEmpty String databaseName, String schemaName,
|
||||
String functionName) {
|
||||
|
||||
String sql = String.format(ROUTINES_SQL, "FUNCTION", databaseName, functionName);
|
||||
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
return SQLExecutor.getInstance().execute(connection, FUNCTION_SQL, resultSet -> {
|
||||
Function function = new Function();
|
||||
function.setDatabaseName(databaseName);
|
||||
function.setSchemaName(schemaName);
|
||||
function.setFunctionName(functionName);
|
||||
if (resultSet.next()) {
|
||||
function.setSpecificName(resultSet.getString("SPECIFIC_NAME"));
|
||||
function.setRemarks(resultSet.getString("ROUTINE_COMMENT"));
|
||||
function.setFunctionBody(resultSet.getString("ROUTINE_DEFINITION"));
|
||||
/* function.setSpecificName(resultSet.getString("SPECIFIC_NAME"));
|
||||
function.setRemarks(resultSet.getString("ROUTINE_COMMENT"));*/
|
||||
function.setFunctionBody(resultSet.getString("ddl"));
|
||||
}
|
||||
return function;
|
||||
});
|
||||
@ -151,7 +162,7 @@ public class ClickHouseMetaData extends DefaultMetaService implements MetaData {
|
||||
|
||||
@Override
|
||||
public List<TableColumn> columns(Connection connection, String databaseName, String schemaName, String tableName) {
|
||||
String sql = String.format(SELECT_TABLE_COLUMNS, databaseName, tableName);
|
||||
String sql = String.format(SELECT_TABLE_COLUMNS, tableName, databaseName);
|
||||
List<TableColumn> tableColumns = new ArrayList<>();
|
||||
|
||||
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
@ -159,22 +170,22 @@ public class ClickHouseMetaData extends DefaultMetaService implements MetaData {
|
||||
TableColumn column = new TableColumn();
|
||||
column.setDatabaseName(databaseName);
|
||||
column.setTableName(tableName);
|
||||
column.setOldName(resultSet.getString("COLUMN_NAME"));
|
||||
column.setName(resultSet.getString("COLUMN_NAME"));
|
||||
String dataType = resultSet.getString("DATA_TYPE");
|
||||
column.setOldName(resultSet.getString("name"));
|
||||
column.setName(resultSet.getString("name"));
|
||||
String dataType = resultSet.getString("type");
|
||||
if (dataType.startsWith("Nullable(")) {
|
||||
dataType = dataType.substring(9, dataType.length() - 1);
|
||||
column.setNullable(1);
|
||||
}
|
||||
column.setColumnType(dataType);
|
||||
column.setDefaultValue(resultSet.getString("COLUMN_DEFAULT"));
|
||||
column.setAutoIncrement(resultSet.getString("EXTRA").contains("auto_increment"));
|
||||
column.setComment(resultSet.getString("COLUMN_COMMENT"));
|
||||
column.setNullable("YES".equalsIgnoreCase(resultSet.getString("IS_NULLABLE")) ? 1 : 0);
|
||||
column.setOrdinalPosition(resultSet.getInt("ORDINAL_POSITION"));
|
||||
column.setDecimalDigits(resultSet.getInt("NUMERIC_SCALE"));
|
||||
column.setCharSetName(resultSet.getString("CHARACTER_SET_NAME"));
|
||||
column.setCollationName(resultSet.getString("COLLATION_NAME"));
|
||||
setColumnSize(column, resultSet.getString("COLUMN_TYPE"));
|
||||
column.setDefaultValue(resultSet.getString("default_expression"));
|
||||
// column.setAutoIncrement(resultSet.getString("EXTRA").contains("auto_increment"));
|
||||
column.setComment(resultSet.getString("comment"));
|
||||
column.setOrdinalPosition(resultSet.getInt("position"));
|
||||
column.setDecimalDigits(resultSet.getInt("numeric_scale"));
|
||||
/*column.setCharSetName(resultSet.getString("CHARACTER_SET_NAME"));
|
||||
column.setCollationName(resultSet.getString("COLLATION_NAME"));*/
|
||||
setColumnSize(column, dataType);
|
||||
tableColumns.add(column);
|
||||
}
|
||||
return tableColumns;
|
||||
@ -214,7 +225,7 @@ public class ClickHouseMetaData extends DefaultMetaService implements MetaData {
|
||||
table.setSchemaName(schemaName);
|
||||
table.setName(viewName);
|
||||
if (resultSet.next()) {
|
||||
table.setDdl(resultSet.getString("definition"));
|
||||
table.setDdl(resultSet.getString(1));
|
||||
}
|
||||
return table;
|
||||
});
|
||||
@ -290,4 +301,11 @@ public class ClickHouseMetaData extends DefaultMetaService implements MetaData {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "`" + name + "`").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<String> getSystemDatabases() {
|
||||
return systemDatabases;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ import ai.chat2db.spi.model.TableIndex;
import org.apache.commons.lang3.StringUtils;


public class ClickHouseSqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
public class ClickHouseSqlBuilder extends DefaultSqlBuilder {
@Override
public String buildCreateTableSql(Table table) {
StringBuilder script = new StringBuilder();
@ -13,10 +13,7 @@ import ai.chat2db.plugin.db2.type.DB2IndexTypeEnum;
|
||||
import ai.chat2db.spi.MetaData;
|
||||
import ai.chat2db.spi.SqlBuilder;
|
||||
import ai.chat2db.spi.jdbc.DefaultMetaService;
|
||||
import ai.chat2db.spi.model.Schema;
|
||||
import ai.chat2db.spi.model.TableIndex;
|
||||
import ai.chat2db.spi.model.TableIndexColumn;
|
||||
import ai.chat2db.spi.model.TableMeta;
|
||||
import ai.chat2db.spi.model.*;
|
||||
import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import ai.chat2db.spi.util.SortUtils;
|
||||
import com.google.common.collect.Lists;
|
||||
@ -130,6 +127,22 @@ public class DB2MetaData extends DefaultMetaService implements MetaData {
|
||||
|
||||
}
|
||||
|
||||
private static String VIEW_DDL_SQL="select TEXT from syscat.views where VIEWSCHEMA='%s' and VIEWNAME='%s';";
|
||||
@Override
|
||||
public Table view(Connection connection, String databaseName, String schemaName, String viewName) {
|
||||
String sql = String.format(VIEW_DDL_SQL, schemaName, viewName);
|
||||
Table table = new Table();
|
||||
table.setDatabaseName(databaseName);
|
||||
table.setSchemaName(schemaName);
|
||||
table.setName(viewName);
|
||||
SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
if (resultSet.next()) {
|
||||
table.setDdl(resultSet.getString("TEXT")+";");
|
||||
}
|
||||
});
|
||||
return table;
|
||||
}
|
||||
|
||||
private TableIndexColumn getTableIndexColumn(ResultSet resultSet) throws SQLException {
|
||||
TableIndexColumn tableIndexColumn = new TableIndexColumn();
|
||||
tableIndexColumn.setColumnName(resultSet.getString("COLNAME"));
|
||||
@ -161,4 +174,8 @@ public class DB2MetaData extends DefaultMetaService implements MetaData {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "\"" + name + "\"").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemSchemas() {
|
||||
return systemSchemas;
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
package ai.chat2db.plugin.dm;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.*;
|
||||
import java.util.Objects;
|
||||
|
||||
import ai.chat2db.spi.DBManage;
|
||||
import ai.chat2db.spi.jdbc.DefaultDBManage;
|
||||
@ -12,6 +12,156 @@ import org.apache.commons.lang3.ObjectUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
public class DMDBManage extends DefaultDBManage implements DBManage {
|
||||
private String format(String tableName) {
|
||||
return "\"" + tableName + "\"";
|
||||
}
|
||||
private static String ROUTINES_SQL
|
||||
= "SELECT OWNER, NAME, TEXT FROM ALL_SOURCE WHERE TYPE = '%s' AND OWNER = '%s' AND NAME = '%s' ORDER BY LINE";
|
||||
private static String TRIGGER_SQL_LIST = "SELECT OWNER, TRIGGER_NAME FROM ALL_TRIGGERS WHERE OWNER = '%s'";
|
||||
|
||||
private static String TRIGGER_SQL
|
||||
= "SELECT OWNER, TRIGGER_NAME, TABLE_OWNER, TABLE_NAME, TRIGGERING_TYPE, TRIGGERING_EVENT, STATUS, TRIGGER_BODY "
|
||||
+ "FROM ALL_TRIGGERS WHERE OWNER = '%s' AND TRIGGER_NAME = '%s'";
|
||||
|
||||
@Override
|
||||
public String exportDatabase(Connection connection, String databaseName, String schemaName, boolean containData) throws SQLException {
|
||||
StringBuilder sqlBuilder = new StringBuilder();
|
||||
exportTables(connection, sqlBuilder, schemaName, containData);
|
||||
exportViews(connection, schemaName, sqlBuilder);
|
||||
exportProcedures(connection, schemaName, sqlBuilder);
|
||||
exportTriggers(connection,schemaName, sqlBuilder);
|
||||
return sqlBuilder.toString();
|
||||
}
|
||||
|
||||
private void exportTables(Connection connection, StringBuilder sqlBuilder, String schemaName, boolean containData) throws SQLException {
|
||||
String sql =String.format("SELECT TABLE_NAME FROM ALL_TABLES where OWNER='%s' and TABLESPACE_NAME='MAIN'", schemaName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String tableName = resultSet.getString("TABLE_NAME");
|
||||
exportTable(connection, tableName, schemaName, sqlBuilder, containData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void exportTable(Connection connection, String tableName, String schemaName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
String sql = """
|
||||
SELECT
|
||||
(SELECT comments FROM user_tab_comments WHERE table_name = '%s') AS comments,
|
||||
(SELECT dbms_metadata.get_ddl('TABLE', '%s', '%s') FROM dual) AS ddl
|
||||
FROM dual;
|
||||
""";
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(String.format(sql, tableName, tableName, schemaName))) {
|
||||
String formatSchemaName = format(schemaName);
|
||||
String formatTableName = format(tableName);
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append("DROP TABLE IF EXISTS ").append(formatSchemaName).append(".").append(formatTableName)
|
||||
.append(";").append("\n")
|
||||
.append(resultSet.getString("ddl")).append("\n");
|
||||
String comment = resultSet.getString("comments");
|
||||
if (StringUtils.isNotBlank(comment)) {
|
||||
sqlBuilder.append("COMMENT ON TABLE ").append(formatSchemaName).append(".").append(formatTableName)
|
||||
.append(" IS ").append("'").append(comment).append("';");
|
||||
}
|
||||
exportTableColumnComment(connection, schemaName, tableName, sqlBuilder);
|
||||
}
|
||||
if (containData) {
|
||||
exportTableData(connection, schemaName, tableName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTableColumnComment(Connection connection, String schemaName, String tableName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql =String.format("select COLNAME,COMMENT$ from SYS.SYSCOLUMNCOMMENTS\n" +
|
||||
"where SCHNAME = '%s' and TVNAME = '%s'and TABLE_TYPE = 'TABLE';", schemaName,tableName);
|
||||
try(ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String columnName = resultSet.getString("COLNAME");
|
||||
String comment = resultSet.getString("COMMENT$");
|
||||
sqlBuilder.append("COMMENT ON COLUMN ").append(format(schemaName)).append(".").append(format(tableName))
|
||||
.append(".").append(format(columnName)).append(" IS ").append("'").append(comment).append("';").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void exportTableData(Connection connection, String schemaName, String tableName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql =String.format("SELECT * FROM %s.%s",schemaName,tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
||||
while (resultSet.next()) {
|
||||
sqlBuilder.append("INSERT INTO ").append(tableName).append(" VALUES (");
|
||||
for (int i = 1; i <= metaData.getColumnCount(); i++) {
|
||||
String value = resultSet.getString(i);
|
||||
if (Objects.isNull(value)) {
|
||||
sqlBuilder.append("NULL");
|
||||
} else {
|
||||
sqlBuilder.append("'").append(value).append("'");
|
||||
}
|
||||
if (i < metaData.getColumnCount()) {
|
||||
sqlBuilder.append(", ");
|
||||
}
|
||||
}
|
||||
sqlBuilder.append(");\n");
|
||||
}
|
||||
sqlBuilder.append("\n");
|
||||
}
|
||||
}
|
||||
|
||||
private void exportViews(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getTables(null, schemaName, null, new String[]{"VIEW"})) {
|
||||
while (resultSet.next()) {
|
||||
String viewName = resultSet.getString("TABLE_NAME");
|
||||
exportView(connection, viewName, schemaName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportView(Connection connection, String viewName, String schemaName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format("SELECT DBMS_METADATA.GET_DDL('VIEW','%s','%s') as ddl FROM DUAL;", viewName, schemaName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append(resultSet.getString("ddl")).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportProcedures(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getProcedures(null, schemaName, null)) {
|
||||
while (resultSet.next()) {
|
||||
String procedureName = resultSet.getString("PROCEDURE_NAME");
|
||||
exportProcedure(connection, schemaName,procedureName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportProcedure(Connection connection, String schemaName, String procedureName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format(ROUTINES_SQL,"PROC", schemaName,procedureName);
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append(resultSet.getString("TEXT")).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTriggers(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql =String.format(TRIGGER_SQL_LIST, schemaName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String triggerName = resultSet.getString("TRIGGER_NAME");
|
||||
exportTrigger(connection,schemaName, triggerName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTrigger(Connection connection, String schemaName, String triggerName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format(TRIGGER_SQL, schemaName,triggerName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append(resultSet.getString("TRIGGER_BODY")).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void connectDatabase(Connection connection, String database) {
|
||||
|
@ -1,11 +1,5 @@
|
||||
package ai.chat2db.plugin.dm;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import ai.chat2db.plugin.dm.builder.DMSqlBuilder;
|
||||
import ai.chat2db.plugin.dm.type.DMColumnTypeEnum;
|
||||
import ai.chat2db.plugin.dm.type.DMDefaultValueEnum;
|
||||
@ -16,11 +10,16 @@ import ai.chat2db.spi.jdbc.DefaultMetaService;
|
||||
import ai.chat2db.spi.model.*;
|
||||
import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import ai.chat2db.spi.util.SortUtils;
|
||||
import ai.chat2db.spi.util.SqlUtils;
|
||||
import com.google.common.collect.Lists;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class DMMetaData extends DefaultMetaService implements MetaData {
|
||||
|
||||
private List<String> systemSchemas = Arrays.asList("CTISYS", "SYS","SYSDBA","SYSSSO","SYSAUDITOR");
|
||||
@ -30,21 +29,40 @@ public class DMMetaData extends DefaultMetaService implements MetaData {
|
||||
List<Schema> schemas = SQLExecutor.getInstance().schemas(connection, databaseName, null);
|
||||
return SortUtils.sortSchema(schemas, systemSchemas);
|
||||
}
|
||||
private String format(String tableName){
|
||||
return "\"" + tableName + "\"";
|
||||
}
|
||||
|
||||
public String tableDDL(Connection connection, String databaseName, String schemaName, String tableName) {
|
||||
String selectObjectDDLSQL = String.format(
|
||||
"select dbms_metadata.get_ddl(%s, %s, %s) AS \"sql\" from dual",
|
||||
SqlUtils.formatSQLString("TABLE"), SqlUtils.formatSQLString(tableName),
|
||||
SqlUtils.formatSQLString(schemaName));
|
||||
return SQLExecutor.getInstance().execute(connection, selectObjectDDLSQL, resultSet -> {
|
||||
try {
|
||||
if (resultSet.next()) {
|
||||
return resultSet.getString("sql");
|
||||
String sql = """
|
||||
SELECT
|
||||
(SELECT comments FROM user_tab_comments WHERE table_name = '%s') AS comments,
|
||||
(SELECT dbms_metadata.get_ddl('TABLE', '%s', '%s') FROM dual) AS ddl
|
||||
FROM dual;
|
||||
""";
|
||||
StringBuilder ddlBuilder = new StringBuilder();
|
||||
String tableDDLSql = String.format(sql, tableName, tableName, schemaName);
|
||||
SQLExecutor.getInstance().execute(connection, tableDDLSql, resultSet -> {
|
||||
if (resultSet.next()) {
|
||||
String ddl = resultSet.getString("ddl");
|
||||
String comment = resultSet.getString("comments");
|
||||
if (StringUtils.isNotBlank(comment)) {
|
||||
ddlBuilder.append(ddl).append("\n").append("COMMENT ON TABLE ").append(format(schemaName))
|
||||
.append(".").append(format(tableName)).append(" IS ").append("'").append(comment).append("';");
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
String columnCommentsSql =String.format("select COLNAME,COMMENT$ from SYS.SYSCOLUMNCOMMENTS\n" +
|
||||
"where SCHNAME = '%s' and TVNAME = '%s'and TABLE_TYPE = 'TABLE';", schemaName,tableName);
|
||||
SQLExecutor.getInstance().execute(connection, columnCommentsSql, resultSet->{
|
||||
while (resultSet.next()) {
|
||||
String columnName = resultSet.getString("COLNAME");
|
||||
String comment = resultSet.getString("COMMENT$");
|
||||
ddlBuilder.append("COMMENT ON COLUMN ").append(format(schemaName)).append(".").append(format(tableName))
|
||||
.append(".").append(format(columnName)).append(" IS ").append("'").append(comment).append("';").append("\n");
|
||||
}
|
||||
});
|
||||
return ddlBuilder.toString();
|
||||
}
|
||||
|
||||
private static String ROUTINES_SQL
|
||||
@ -90,7 +108,7 @@ public class DMMetaData extends DefaultMetaService implements MetaData {
|
||||
}
|
||||
|
||||
private static String TRIGGER_SQL
|
||||
= "SELECT OWNER, TRIGGER_NAME, TABLE_OWNER, TABLE_NAME, TRIGGER_TYPE, TRIGGERING_EVENT, STATUS, TRIGGER_BODY "
|
||||
= "SELECT OWNER, TRIGGER_NAME, TABLE_OWNER, TABLE_NAME, TRIGGERING_TYPE, TRIGGERING_EVENT, STATUS, TRIGGER_BODY "
|
||||
+ "FROM ALL_TRIGGERS WHERE OWNER = '%s' AND TRIGGER_NAME = '%s'";
|
||||
|
||||
private static String TRIGGER_SQL_LIST = "SELECT OWNER, TRIGGER_NAME FROM ALL_TRIGGERS WHERE OWNER = '%s'";
|
||||
@ -227,4 +245,10 @@ public class DMMetaData extends DefaultMetaService implements MetaData {
|
||||
public String getMetaDataName(String... names) {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "\"" + name + "\"").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<String> getSystemSchemas() {
|
||||
return systemSchemas;
|
||||
}
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ package ai.chat2db.plugin.dm.builder;
|
||||
|
||||
import ai.chat2db.plugin.dm.type.DMColumnTypeEnum;
|
||||
import ai.chat2db.plugin.dm.type.DMIndexTypeEnum;
|
||||
import ai.chat2db.spi.enums.EditStatus;
|
||||
import ai.chat2db.spi.jdbc.DefaultSqlBuilder;
|
||||
import ai.chat2db.spi.model.Schema;
|
||||
import ai.chat2db.spi.model.Table;
|
||||
@ -9,6 +10,8 @@ import ai.chat2db.spi.model.TableColumn;
|
||||
import ai.chat2db.spi.model.TableIndex;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class DMSqlBuilder extends DefaultSqlBuilder {
|
||||
|
||||
|
||||
@ -79,10 +82,11 @@ public class DMSqlBuilder extends DefaultSqlBuilder {
|
||||
|
||||
// append modify column
|
||||
for (TableColumn tableColumn : newTable.getColumnList()) {
|
||||
if (StringUtils.isNotBlank(tableColumn.getEditStatus())) {
|
||||
String editStatus = tableColumn.getEditStatus();
|
||||
if (StringUtils.isNotBlank(editStatus)) {
|
||||
DMColumnTypeEnum typeEnum = DMColumnTypeEnum.getByType(tableColumn.getColumnType());
|
||||
script.append("\t").append(typeEnum.buildModifyColumn(tableColumn)).append(";\n");
|
||||
if (StringUtils.isNotBlank(tableColumn.getComment())) {
|
||||
if (StringUtils.isNotBlank(tableColumn.getComment())&&!Objects.equals(EditStatus.DELETE.toString(),editStatus)) {
|
||||
script.append("\n").append(buildComment(tableColumn)).append(";\n");
|
||||
}
|
||||
}
|
||||
|
@ -41,6 +41,7 @@ public enum DMColumnTypeEnum implements ColumnBuilder {
|
||||
DATE("DATE", false, false, true, false, false, false, true, true, false, false),
|
||||
|
||||
DECIMAL("DECIMAL", true, true, true, false, false, false, true, true, false, false),
|
||||
DEC("DEC", true, true, true, false, false, false, true, true, false, false),
|
||||
|
||||
DOUBLE("DOUBLE", false, false, true, false, false, false, true, true, false, false),
|
||||
|
||||
@ -84,7 +85,8 @@ public enum DMColumnTypeEnum implements ColumnBuilder {
|
||||
LONGVARBINARY("LONGVARBINARY", false, false, true, false, false, false, true, true, false, false),
|
||||
|
||||
|
||||
LONGVARCHAR("LONGVARCHAR", true, false, true, false, false, false, true, true, false, false),
|
||||
LONGVARCHAR("LONGVARCHAR", false, false, true, false, false, false, true, true, false, false),
|
||||
TEXT("TEXT", false, false, true, false, false, false, true, true, false, false),
|
||||
|
||||
|
||||
NUMBERIC("NUMBERIC", true, true, true, false, false, false, true, true, false, false),
|
||||
@ -209,7 +211,7 @@ public enum DMColumnTypeEnum implements ColumnBuilder {
|
||||
|
||||
private String buildDataType(TableColumn column, DMColumnTypeEnum type) {
|
||||
String columnType = type.columnType.getTypeName();
|
||||
if (Arrays.asList(CHAR, VARCHAR, VARCHAR2, LONGVARCHAR).contains(type)) {
|
||||
if (Arrays.asList(CHAR, VARCHAR, VARCHAR2, LONGVARCHAR,TEXT).contains(type)) {
|
||||
StringBuilder script = new StringBuilder();
|
||||
script.append(columnType);
|
||||
if (column.getColumnSize() != null && StringUtils.isEmpty(column.getUnit())) {
|
||||
@ -220,7 +222,7 @@ public enum DMColumnTypeEnum implements ColumnBuilder {
|
||||
return script.toString();
|
||||
}
|
||||
|
||||
if (Arrays.asList(DECIMAL, FLOAT, NUMBER, TIMESTAMP, NUMBERIC).contains(type)) {
|
||||
if (Arrays.asList(DECIMAL,DEC, FLOAT, NUMBER, TIMESTAMP, NUMBERIC).contains(type)) {
|
||||
StringBuilder script = new StringBuilder();
|
||||
script.append(columnType);
|
||||
if (column.getColumnSize() != null && column.getDecimalDigits() == null) {
|
||||
|
@ -205,4 +205,9 @@ public class H2Meta extends DefaultMetaService implements MetaData {
|
||||
public String getMetaDataName(String... names) {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "\"" + name + "\"").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemSchemas() {
|
||||
return systemSchemas;
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ import ai.chat2db.spi.jdbc.DefaultSqlBuilder;
|
||||
import ai.chat2db.spi.model.Schema;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
public class H2SqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
|
||||
public class H2SqlBuilder extends DefaultSqlBuilder {
|
||||
|
||||
@Override
|
||||
public String buildCreateSchemaSql(Schema schema) {
|
||||
|
@ -30,6 +30,9 @@ public class KingBaseMetaData extends DefaultMetaService implements MetaData {
|
||||
|
||||
private List<String> systemDatabases = Arrays.asList("SAMPLES", "SECURITY");
|
||||
|
||||
|
||||
private List<String> systemSchemas = Arrays.asList("pg_toast","pg_temp_1","pg_toast_temp_1","pg_catalog","information_schema");
|
||||
|
||||
@Override
|
||||
public List<Database> databases(Connection connection) {
|
||||
List<Database> list = SQLExecutor.getInstance().execute(connection, "SELECT datname FROM sys_database", resultSet -> {
|
||||
@ -203,4 +206,14 @@ public class KingBaseMetaData extends DefaultMetaService implements MetaData {
|
||||
public String getMetaDataName(String... names) {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "\"" + name + "\"").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemDatabases() {
|
||||
return systemDatabases;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemSchemas() {
|
||||
return systemSchemas;
|
||||
}
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
||||
public class KingBaseSqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
|
||||
public class KingBaseSqlBuilder extends DefaultSqlBuilder {
|
||||
@Override
|
||||
public String buildCreateTableSql(Table table) {
|
||||
StringBuilder script = new StringBuilder();
|
||||
|
@ -0,0 +1,17 @@
package ai.chat2db.plugin.mongodb;

import ai.chat2db.spi.model.Command;
import ai.chat2db.spi.model.ExecuteResult;
import ai.chat2db.spi.sql.SQLExecutor;

import java.util.List;

public class MongodbCommandExecutor extends SQLExecutor {

    @Override
    public List<ExecuteResult> executeSelectTable(Command command) {
        // Browsing a MongoDB collection is mapped to a db.<collection>.find() command.
        String sql = "db." + command.getTableName() + ".find()";
        command.setScript(sql);
        return execute(command);
    }
}
@ -1,9 +1,9 @@
|
||||
package ai.chat2db.plugin.mongodb;
|
||||
|
||||
import ai.chat2db.spi.CommandExecutor;
|
||||
import ai.chat2db.spi.MetaData;
|
||||
import ai.chat2db.spi.jdbc.DefaultMetaService;
|
||||
import ai.chat2db.spi.model.Database;
|
||||
import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import java.sql.Connection;
|
||||
@ -17,4 +17,9 @@ public class MongodbMetaData extends DefaultMetaService implements MetaData {
|
||||
public List<Database> databases(Connection connection) {
|
||||
return Lists.newArrayList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CommandExecutor getCommandExecutor() {
|
||||
return new MongodbCommandExecutor();
|
||||
}
|
||||
}
|
||||
|
@ -7,34 +7,54 @@ import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import java.sql.*;
|
||||
import java.util.Objects;
|
||||
|
||||
public class MysqlDBManage extends DefaultDBManage implements DBManage {
|
||||
@Override
|
||||
public String exportDatabase(Connection connection, String databaseName, String schemaName, boolean containData) throws SQLException {
|
||||
StringBuilder sqlBuilder = new StringBuilder();
|
||||
exportTables(connection, sqlBuilder, containData);
|
||||
exportViews(connection, sqlBuilder);
|
||||
exportTables(connection, databaseName, sqlBuilder, containData);
|
||||
exportViews(connection, databaseName, sqlBuilder);
|
||||
exportProcedures(connection, sqlBuilder);
|
||||
exportTriggers(connection, sqlBuilder);
|
||||
exportFunctions(connection, databaseName, sqlBuilder);
|
||||
return sqlBuilder.toString();
|
||||
}
|
||||
private void exportTables(Connection connection,StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet tables = statement.executeQuery("SHOW FULL TABLES WHERE Table_type = 'BASE TABLE'")) {
|
||||
while (tables.next()) {
|
||||
String tableName = tables.getString(1);
|
||||
exportTable(connection, tableName, sqlBuilder, containData);
|
||||
|
||||
private void exportFunctions(Connection connection, String databaseName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getFunctions(databaseName, null, null)) {
|
||||
while (resultSet.next()) {
|
||||
exportFunction(connection, resultSet.getString("FUNCTION_NAME"), sqlBuilder);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
private void exportFunction(Connection connection, String functionName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format("SHOW CREATE FUNCTION %s;", functionName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append("DROP FUNCTION IF EXISTS ").append(functionName).append(";").append("\n")
|
||||
.append(resultSet.getString("Create Function")).append(";").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTables(Connection connection, String databaseName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getTables(databaseName, null, null, new String[]{"TABLE", "SYSTEM TABLE"})) {
|
||||
while (resultSet.next()) {
|
||||
exportTable(connection, resultSet.getString("TABLE_NAME"), sqlBuilder, containData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void exportTable(Connection connection, String tableName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("show create table " + tableName)) {
|
||||
String sql = String.format("show create table %s ", tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
String createTableSql = "DROP TABLE IF EXISTS `" + tableName + "`;\n" +
|
||||
resultSet.getString(2) + ";\n";
|
||||
sqlBuilder.append(createTableSql).append("\n");
|
||||
|
||||
sqlBuilder.append("DROP TABLE IF EXISTS ").append(format(tableName)).append(";").append("\n")
|
||||
.append(resultSet.getString("Create Table")).append(";").append("\n");
|
||||
if (containData) {
|
||||
exportTableData(connection, tableName, sqlBuilder);
|
||||
}
|
||||
@ -43,63 +63,69 @@ public class MysqlDBManage extends DefaultDBManage implements DBManage {
|
||||
}
|
||||
|
||||
private void exportTableData(Connection connection, String tableName, StringBuilder sqlBuilder) throws SQLException {
|
||||
StringBuilder insertSql = new StringBuilder();
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("select * from " + tableName)) {
|
||||
String sql = String.format("select * from %s", tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
||||
while (resultSet.next()) {
|
||||
insertSql.append("INSERT INTO ").append(tableName).append(" VALUES (");
|
||||
sqlBuilder.append("INSERT INTO ").append(tableName).append(" VALUES (");
|
||||
for (int i = 1; i <= metaData.getColumnCount(); i++) {
|
||||
insertSql.append("'").append(resultSet.getString(i)).append("'");
|
||||
String value = resultSet.getString(i);
|
||||
if (Objects.isNull(value)) {
|
||||
sqlBuilder.append("NULL");
|
||||
} else {
|
||||
sqlBuilder.append("'").append(value).append("'");
|
||||
}
|
||||
if (i < metaData.getColumnCount()) {
|
||||
insertSql.append(", ");
|
||||
sqlBuilder.append(", ");
|
||||
}
|
||||
}
|
||||
insertSql.append(");\n");
|
||||
sqlBuilder.append(");\n");
|
||||
}
|
||||
insertSql.append("\n");
|
||||
sqlBuilder.append("\n");
|
||||
}
|
||||
sqlBuilder.append(insertSql);
|
||||
}
|
||||
|
||||
private void exportViews(Connection connection, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW FULL TABLES WHERE Table_type = 'VIEW'")) {
|
||||
private void exportViews(Connection connection, String databaseName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getTables(databaseName, null, null, new String[]{"VIEW"})) {
|
||||
while (resultSet.next()) {
|
||||
String viewName = resultSet.getString(1);
|
||||
exportView(connection, viewName, sqlBuilder);
|
||||
exportView(connection, resultSet.getString("TABLE_NAME"), sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportView(Connection connection, String viewName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW CREATE VIEW " + viewName)) {
|
||||
String sql = String.format("show create view %s ", viewName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
String createViewSql = "DROP VIEW IF EXISTS `" + viewName + "`;\n" + resultSet.getString("Create View") + ";\n";
|
||||
sqlBuilder.append(createViewSql).append("\n");
|
||||
sqlBuilder.append("DROP VIEW IF EXISTS ").append(format(viewName)).append(";").append("\n")
|
||||
.append(resultSet.getString("Create View")).append(";").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportProcedures(Connection connection, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW PROCEDURE STATUS WHERE Db = DATABASE()")) {
|
||||
String sql = "SHOW PROCEDURE STATUS WHERE Db = DATABASE()";
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String procedureName = resultSet.getString("Name");
|
||||
exportProcedure(connection, procedureName, sqlBuilder);
|
||||
exportProcedure(connection, resultSet.getString("Name"), sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportProcedure(Connection connection, String procedureName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW CREATE PROCEDURE " + procedureName)) {
|
||||
String sql = String.format("show create procedure %s ", procedureName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
String createProcedureSql = "DROP PROCEDURE IF EXISTS `" + procedureName + "`;\n" +
|
||||
"delimiter ;;\n" + resultSet.getString("Create Procedure") + ";;\n" + "delimiter ;\n";
|
||||
sqlBuilder.append(createProcedureSql).append("\n");
|
||||
sqlBuilder.append("DROP PROCEDURE IF EXISTS ").append(format(procedureName)).append(";").append("\n")
|
||||
.append("delimiter ;;").append("\n").append(resultSet.getString("Create Procedure")).append(";;")
|
||||
.append("\n").append("delimiter ;").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTriggers(Connection connection, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW TRIGGERS")) {
|
||||
String sql = "SHOW TRIGGERS";
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String triggerName = resultSet.getString("Trigger");
|
||||
exportTrigger(connection, triggerName, sqlBuilder);
|
||||
@ -108,12 +134,12 @@ public class MysqlDBManage extends DefaultDBManage implements DBManage {
|
||||
}
|
||||
|
||||
private void exportTrigger(Connection connection, String triggerName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW CREATE TRIGGER " + triggerName)) {
|
||||
String sql = String.format("show create trigger %s ", triggerName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
String createTriggerSql = "DROP TRIGGER IF EXISTS `" + triggerName + "`;\n" +
|
||||
"delimiter ;;\n" + resultSet.getString("SQL Original Statement") + ";;\n" +
|
||||
"delimiter ;\n";
|
||||
sqlBuilder.append(createTriggerSql).append("\n");
|
||||
sqlBuilder.append("DROP TRIGGER IF EXISTS ").append(format(triggerName)).append(";").append("\n")
|
||||
.append("delimiter ;;").append("\n").append(resultSet.getString("SQL Original Statement")).append(";;")
|
||||
.append("\n").append("delimiter ;").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -140,7 +166,7 @@ public class MysqlDBManage extends DefaultDBManage implements DBManage {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
SQLExecutor.getInstance().execute(connection,"use `" + database + "`;");
|
||||
SQLExecutor.getInstance().execute(connection, "use `" + database + "`;");
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -149,8 +175,8 @@ public class MysqlDBManage extends DefaultDBManage implements DBManage {
|
||||
|
||||
@Override
|
||||
public void dropTable(Connection connection, String databaseName, String schemaName, String tableName) {
|
||||
String sql = "DROP TABLE "+ format(tableName);
|
||||
SQLExecutor.getInstance().execute(connection,sql, resultSet -> null);
|
||||
String sql = "DROP TABLE " + format(tableName);
|
||||
SQLExecutor.getInstance().execute(connection, sql, resultSet -> null);
|
||||
}
|
||||
|
||||
public static String format(String tableName) {
|
||||
|
@ -10,6 +10,7 @@ import ai.chat2db.spi.model.*;
|
||||
import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.checkerframework.checker.units.qual.A;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.ResultSet;
|
||||
@ -57,19 +58,25 @@ public class MysqlMetaData extends DefaultMetaService implements MetaData {
|
||||
public Function function(Connection connection, @NotEmpty String databaseName, String schemaName,
|
||||
String functionName) {
|
||||
|
||||
String sql = String.format(ROUTINES_SQL, "FUNCTION", databaseName, functionName);
|
||||
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
Function function = new Function();
|
||||
function.setDatabaseName(databaseName);
|
||||
function.setSchemaName(schemaName);
|
||||
function.setFunctionName(functionName);
|
||||
String functionInfoSql = String.format(ROUTINES_SQL, "FUNCTION", databaseName, functionName);
|
||||
Function function = SQLExecutor.getInstance().execute(connection, functionInfoSql, resultSet -> {
|
||||
Function f = new Function();
|
||||
f.setDatabaseName(databaseName);
|
||||
f.setSchemaName(schemaName);
|
||||
f.setFunctionName(functionName);
|
||||
if (resultSet.next()) {
|
||||
function.setSpecificName(resultSet.getString("SPECIFIC_NAME"));
|
||||
function.setRemarks(resultSet.getString("ROUTINE_COMMENT"));
|
||||
function.setFunctionBody(resultSet.getString("ROUTINE_DEFINITION"));
|
||||
f.setSpecificName(resultSet.getString("SPECIFIC_NAME"));
|
||||
f.setRemarks(resultSet.getString("ROUTINE_COMMENT"));
|
||||
}
|
||||
return function;
|
||||
return f;
|
||||
});
|
||||
String functionDDlSql =String.format("SHOW CREATE FUNCTION %s", functionName);
|
||||
SQLExecutor.getInstance().execute(connection,functionDDlSql, resultSet -> {
|
||||
if (resultSet.next()) {
|
||||
function.setFunctionBody(resultSet.getString("Create Function"));
|
||||
}
|
||||
} );
|
||||
return function;
|
||||
|
||||
}
|
||||
|
||||
@ -113,6 +120,20 @@ public class MysqlMetaData extends DefaultMetaService implements MetaData {
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Procedure> procedures(Connection connection, String databaseName, String schemaName) {
|
||||
String sql = "SHOW PROCEDURE STATUS WHERE Db = DATABASE()";
|
||||
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
ArrayList<Procedure> procedures = new ArrayList<>();
|
||||
while (resultSet.next()) {
// Create a new Procedure per row; reusing a single instance would make every
// list entry point at the same object.
Procedure procedure = new Procedure();
procedure.setProcedureName(resultSet.getString("Name"));
procedures.add(procedure);
|
||||
}
|
||||
return procedures;
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public Procedure procedure(Connection connection, @NotEmpty String databaseName, String schemaName,
|
||||
String procedureName) {
|
||||
@ -193,21 +214,18 @@ public class MysqlMetaData extends DefaultMetaService implements MetaData {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static String VIEW_SQL
|
||||
= "SELECT TABLE_SCHEMA AS DatabaseName, TABLE_NAME AS ViewName, VIEW_DEFINITION AS definition, CHECK_OPTION, "
|
||||
+ "IS_UPDATABLE FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s';";
|
||||
private static String VIEW_DDL_SQL="show create view %s";
|
||||
|
||||
@Override
|
||||
public Table view(Connection connection, String databaseName, String schemaName, String viewName) {
|
||||
String sql = String.format(VIEW_SQL, databaseName, viewName);
|
||||
String sql = String.format(VIEW_DDL_SQL, viewName);
|
||||
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
Table table = new Table();
|
||||
table.setDatabaseName(databaseName);
|
||||
table.setSchemaName(schemaName);
|
||||
table.setName(viewName);
|
||||
if (resultSet.next()) {
|
||||
table.setDdl(resultSet.getString("definition"));
|
||||
table.setDdl(resultSet.getString("Create View"));
|
||||
}
|
||||
return table;
|
||||
});
|
||||
@ -303,4 +321,10 @@ public class MysqlMetaData extends DefaultMetaService implements MetaData {
|
||||
public ValueHandler getValueHandler() {
|
||||
return new MysqlValueHandler();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemDatabases() {
|
||||
return systemDatabases;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ import org.apache.commons.lang3.StringUtils;
|
||||
import java.util.*;
|
||||
|
||||
|
||||
public class MysqlSqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
|
||||
public class MysqlSqlBuilder extends DefaultSqlBuilder {
|
||||
@Override
|
||||
public String buildCreateTableSql(Table table) {
|
||||
StringBuilder script = new StringBuilder();
|
||||
|
@ -118,7 +118,7 @@ public class OracleMetaData extends DefaultMetaService implements MetaData {
|
||||
tableColumn.setName(resultSet.getString("COLUMN_NAME"));
|
||||
tableColumn.setColumnType(resultSet.getString("DATA_TYPE"));
|
||||
Integer dataPrecision = resultSet.getInt("DATA_PRECISION");
|
||||
if(dataPrecision!=null) {
|
||||
if(resultSet.getString("DATA_PRECISION") != null) {
|
||||
tableColumn.setColumnSize(dataPrecision);
|
||||
}else {
|
||||
tableColumn.setColumnSize(resultSet.getInt("DATA_LENGTH"));
|
||||
@ -249,8 +249,9 @@ public class OracleMetaData extends DefaultMetaService implements MetaData {
|
||||
return SQLExecutor.getInstance().execute(connection, String.format(TRIGGER_SQL_LIST, schemaName),
|
||||
resultSet -> {
|
||||
while (resultSet.next()) {
|
||||
String triggerName = resultSet.getString("TRIGGER_NAME");
|
||||
Trigger trigger = new Trigger();
|
||||
trigger.setTriggerName(resultSet.getString("TRIGGER_NAME"));
|
||||
trigger.setTriggerName(triggerName==null?"":triggerName.trim());
|
||||
trigger.setSchemaName(schemaName);
|
||||
trigger.setDatabaseName(databaseName);
|
||||
triggers.add(trigger);
|
||||
@ -263,6 +264,7 @@ public class OracleMetaData extends DefaultMetaService implements MetaData {
|
||||
public Trigger trigger(Connection connection, @NotEmpty String databaseName, String schemaName,
|
||||
String triggerName) {
|
||||
|
||||
|
||||
String sql = String.format(TRIGGER_DDL_SQL, schemaName, triggerName);
|
||||
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
|
||||
Trigger trigger = new Trigger();
|
||||
@ -330,4 +332,10 @@ public class OracleMetaData extends DefaultMetaService implements MetaData {
|
||||
public String getMetaDataName(String... names) {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "\"" + name + "\"").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<String> getSystemSchemas() {
|
||||
return systemSchemas;
|
||||
}
|
||||
}
|
||||
|
@ -2,14 +2,13 @@ package ai.chat2db.plugin.oracle.builder;
|
||||
|
||||
import ai.chat2db.plugin.oracle.type.OracleColumnTypeEnum;
|
||||
import ai.chat2db.plugin.oracle.type.OracleIndexTypeEnum;
|
||||
import ai.chat2db.spi.SqlBuilder;
|
||||
import ai.chat2db.spi.jdbc.DefaultSqlBuilder;
|
||||
import ai.chat2db.spi.model.Table;
|
||||
import ai.chat2db.spi.model.TableColumn;
|
||||
import ai.chat2db.spi.model.TableIndex;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
public class OracleSqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
|
||||
public class OracleSqlBuilder extends DefaultSqlBuilder {
|
||||
@Override
|
||||
public String buildCreateTableSql(Table table) {
|
||||
StringBuilder script = new StringBuilder();
|
||||
|
@ -1,7 +1,5 @@
|
||||
package ai.chat2db.plugin.postgresql;
|
||||
|
||||
import java.sql.Connection;
|
||||
|
||||
import ai.chat2db.spi.DBManage;
|
||||
import ai.chat2db.spi.jdbc.DefaultDBManage;
|
||||
import ai.chat2db.spi.sql.Chat2DBContext;
|
||||
@ -9,7 +7,119 @@ import ai.chat2db.spi.sql.ConnectInfo;
|
||||
import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.sql.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Objects;
|
||||
|
||||
import static ai.chat2db.plugin.postgresql.consts.SQLConst.*;
|
||||
|
||||
public class PostgreSQLDBManage extends DefaultDBManage implements DBManage {
|
||||
|
||||
|
||||
public String exportDatabase(Connection connection, String databaseName, String schemaName, boolean containData) throws SQLException {
|
||||
StringBuilder sqlBuilder = new StringBuilder();
|
||||
exportTypes(connection, sqlBuilder);
|
||||
exportTables(connection, databaseName, schemaName, sqlBuilder, containData);
|
||||
exportViews(connection, schemaName, sqlBuilder);
|
||||
exportFunctions(connection, schemaName, sqlBuilder);
|
||||
exportTriggers(connection, sqlBuilder);
|
||||
return sqlBuilder.toString();
|
||||
}
|
||||
|
||||
private void exportTypes(Connection connection, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(ENUM_TYPE_DDL_SQL)) {
|
||||
while (resultSet.next()) {
|
||||
sqlBuilder.append(resultSet.getString("ddl")).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
private void exportTables(Connection connection, String databaseName, String schemaName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getTables(databaseName, schemaName, null,
|
||||
new String[]{"TABLE", "SYSTEM TABLE","PARTITIONED TABLE"})) {
|
||||
ArrayList<String> tableNames = new ArrayList<>();
|
||||
while (resultSet.next()) {
|
||||
String tableName = resultSet.getString("TABLE_NAME");
|
||||
tableNames.add(tableName);
|
||||
}
|
||||
for (String tableName : tableNames) {
|
||||
exportTable(connection, schemaName, tableName, sqlBuilder);
|
||||
}
|
||||
if (containData) {
|
||||
for (String tableName : tableNames) {
|
||||
exportTableData(connection, schemaName, tableName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTable(Connection connection, String schemaName, String tableName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql =String.format( "select pg_get_tabledef('%s','%s',true,'COMMENTS') as ddl;", schemaName,tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append("\n").append("DROP TABLE IF EXISTS ").append(tableName).append(";").append("\n")
|
||||
.append(resultSet.getString("ddl")).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTableData(Connection connection, String schemaName, String tableName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql =String.format("select * from %s.%s", schemaName,tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
||||
while (resultSet.next()) {
|
||||
sqlBuilder.append("INSERT INTO ").append(tableName).append(" VALUES (");
|
||||
for (int i = 1; i <= metaData.getColumnCount(); i++) {
|
||||
String value = resultSet.getString(i);
|
||||
if (Objects.isNull(value)) {
|
||||
sqlBuilder.append("NULL");
|
||||
} else {
|
||||
sqlBuilder.append("'").append(value).append("'");
|
||||
}
|
||||
if (i < metaData.getColumnCount()) {
|
||||
sqlBuilder.append(", ");
|
||||
}
|
||||
}
|
||||
sqlBuilder.append(");\n");
|
||||
}
|
||||
sqlBuilder.append("\n");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void exportViews(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
|
||||
|
||||
String sql = String.format("SELECT table_name, view_definition FROM information_schema.views WHERE table_schema = '%s'",schemaName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String viewName = resultSet.getString("table_name");
|
||||
String viewDefinition = resultSet.getString("view_definition");
|
||||
sqlBuilder.append("CREATE OR REPLACE VIEW ").append(viewName).append(" AS ").append(viewDefinition).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportFunctions(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format("SELECT proname, pg_get_functiondef(oid) AS function_definition FROM pg_proc " +
|
||||
"WHERE pronamespace = (SELECT oid FROM pg_namespace WHERE nspname = '%s')", schemaName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String functionName = resultSet.getString("proname");
|
||||
String functionDefinition = resultSet.getString("function_definition");
|
||||
sqlBuilder.append("DROP FUNCTION IF EXISTS ").append(schemaName).append(".").append(functionName).append(";\n");
|
||||
sqlBuilder.append(functionDefinition).append(";\n\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTriggers(Connection connection, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = "SELECT pg_get_triggerdef(oid) AS trigger_definition FROM pg_trigger";
|
||||
try (Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
sqlBuilder.append(resultSet.getString("trigger_definition")).append(";").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void connectDatabase(Connection connection, String database) {
|
||||
try {
|
||||
|
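A hedged sketch of how the export path added above can be driven from plain JDBC; the URL and credentials are placeholders, and exportTable assumes the pg_get_tabledef helper from SQLConst is already installed in the target database.

import java.sql.Connection;
import java.sql.DriverManager;

import ai.chat2db.plugin.postgresql.PostgreSQLDBManage;

public class PostgreSqlExportSketch {
    public static void main(String[] args) throws Exception {
        // placeholder connection details -- adjust for a real environment
        try (Connection connection = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/sample", "postgres", "postgres")) {
            PostgreSQLDBManage manage = new PostgreSQLDBManage();
            // emits enum types, table DDL (plus data when true), views, functions and triggers, in that order
            String script = manage.exportDatabase(connection, "sample", "public", true);
            System.out.println(script);
        }
    }
}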
@ -102,13 +102,12 @@ public class PostgreSQLMetaData extends DefaultMetaService implements MetaData {
|
||||
|
||||
@Override
|
||||
public String tableDDL(Connection connection, String databaseName, String schemaName, String tableName) {
|
||||
SQLExecutor.getInstance().execute(connection, FUNCTION_SQL.replaceFirst("tableSchema", schemaName),
|
||||
resultSet -> null);
|
||||
String ddlSql = "select showcreatetable('" + schemaName + "','" + tableName + "') as sql";
|
||||
SQLExecutor.getInstance().execute(connection, FUNCTION_SQL, resultSet -> null);
|
||||
String ddlSql = "select pg_get_tabledef" + "(" + "'" + schemaName + "'" + "," + "'" + tableName + "'" + "," + "false" + "," + "'" + "COMMENTS" + "'" + ")" + ";";
|
||||
return SQLExecutor.getInstance().execute(connection, ddlSql, resultSet -> {
|
||||
try {
|
||||
if (resultSet.next()) {
|
||||
return resultSet.getString("sql");
|
||||
return resultSet.getString(1);
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
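The rewritten tableDDL above boils down to two statements; a minimal sketch, assuming FUNCTION_SQL (from SQLConst in this change) can be executed as a single multi-statement batch over the PostgreSQL JDBC driver.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

import static ai.chat2db.plugin.postgresql.consts.SQLConst.FUNCTION_SQL;

public class TableDdlSketch {
    public static String tableDdl(Connection connection, String schemaName, String tableName) throws Exception {
        try (Statement statement = connection.createStatement()) {
            // 1. (re)create public.pg_get_tabledef and its helper objects
            statement.execute(FUNCTION_SQL);
            // 2. ask it for the CREATE TABLE script (verbose off, COMMENTS option included)
            String ddlSql = String.format("select pg_get_tabledef('%s','%s',false,'COMMENTS');", schemaName, tableName);
            try (ResultSet resultSet = statement.executeQuery(ddlSql)) {
                return resultSet.next() ? resultSet.getString(1) : null;
            }
        }
    }
}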
@ -306,4 +305,14 @@ public class PostgreSQLMetaData extends DefaultMetaService implements MetaData {
|
||||
public String getMetaDataName(String... names) {
|
||||
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "\"" + name + "\"").collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemDatabases() {
|
||||
return systemDatabases;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSystemSchemas() {
|
||||
return systemSchemas;
|
||||
}
|
||||
}
|
||||
|
@ -2,7 +2,6 @@ package ai.chat2db.plugin.postgresql.builder;
|
||||
|
||||
import ai.chat2db.plugin.postgresql.type.PostgreSQLColumnTypeEnum;
|
||||
import ai.chat2db.plugin.postgresql.type.PostgreSQLIndexTypeEnum;
|
||||
import ai.chat2db.spi.SqlBuilder;
|
||||
import ai.chat2db.spi.jdbc.DefaultSqlBuilder;
|
||||
import ai.chat2db.spi.model.*;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
@ -14,7 +13,7 @@ import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
||||
public class PostgreSQLSqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
|
||||
public class PostgreSQLSqlBuilder extends DefaultSqlBuilder {
|
||||
@Override
|
||||
public String buildCreateTableSql(Table table) {
|
||||
StringBuilder script = new StringBuilder();
|
||||
|
@ -2,152 +2,689 @@ package ai.chat2db.plugin.postgresql.consts;
|
||||
|
||||
public class SQLConst {
|
||||
public static String FUNCTION_SQL =
|
||||
" CREATE OR REPLACE FUNCTION showcreatetable(namespace character varying, tablename character "
|
||||
+ "varying)\n"
|
||||
+ " RETURNS character varying AS\n"
|
||||
+ "\n"
|
||||
+ " $BODY$\n"
|
||||
+ " declare\n"
|
||||
+ " tableScript character varying default '';\n"
|
||||
+ "\n"
|
||||
+ " begin\n"
|
||||
+ " -- columns\n"
|
||||
+ " tableScript:=tableScript || ' CREATE TABLE '|| tablename|| ' ( '|| chr(13)||chr(10) || "
|
||||
+ "array_to_string"
|
||||
+ "(\n"
|
||||
+ " array(\n"
|
||||
+ " select ' ' || concat_ws(' ',fieldName, fieldType, defaultValue, isNullStr"
|
||||
+ " ) as "
|
||||
+ "column_line\n"
|
||||
+ " from (\n"
|
||||
+ " select a.attname as fieldName,format_type(a.atttypid,a.atttypmod) as fieldType,"
|
||||
+ " CASE WHEN \n"
|
||||
+ " (SELECT substring(pg_catalog.pg_get_expr(B.adbin, B.adrelid) for 128)\n"
|
||||
+ " FROM pg_catalog.pg_attrdef B WHERE B.adrelid = A.attrelid AND B.adnum = A.attnum AND A.atthasdef) IS NOT NULL THEN\n"
|
||||
+ " 'DEFAULT '|| (SELECT substring(pg_catalog.pg_get_expr(B.adbin, B.adrelid) for 128)\n"
|
||||
+ " FROM pg_catalog.pg_attrdef B WHERE B.adrelid = A.attrelid AND B.adnum = A.attnum AND A.atthasdef)\n"
|
||||
+ " ELSE\n"
|
||||
+ " ''\n"
|
||||
+ " END as defaultValue,"
|
||||
+ " (case when a.attnotnull=true then 'not null' else 'null' end) as isNullStr\n"
|
||||
+ " from pg_attribute a where attstattarget=-1 and attrelid = (select c.oid from pg_class c,"
|
||||
+ "pg_namespace n"
|
||||
+ " where\n"
|
||||
+ " c.relnamespace=n.oid and n.nspname =namespace and relname =tablename)\n"
|
||||
+ "\n"
|
||||
+ " ) as string_columns\n"
|
||||
+ " ),','||chr(13)||chr(10)) || ',';\n"
|
||||
+ "\n"
|
||||
+ "\n"
|
||||
+ " -- 约束\n"
|
||||
+ " tableScript:= tableScript || chr(13)||chr(10) || array_to_string(\n"
|
||||
+ " array(\n"
|
||||
+ " select concat(' CONSTRAINT ',conname ,c ,u,p,f) from (\n"
|
||||
+ " select conname,\n"
|
||||
+ " case when contype='c' then ' CHECK('|| ( select findattname(namespace,tablename,'c') ) ||')' "
|
||||
+ "end "
|
||||
+ "as c "
|
||||
+ ",\n"
|
||||
+ " case when contype='u' then ' UNIQUE('|| ( select findattname(namespace,tablename,'u') ) ||')' "
|
||||
+ "end "
|
||||
+ "as u"
|
||||
+ " ,\n"
|
||||
+ " case when contype='p' then ' PRIMARY KEY ('|| ( select findattname(namespace,tablename,'p') ) "
|
||||
+ "||')' "
|
||||
+ "end as p ,\n"
|
||||
+ " case when contype='f' then ' FOREIGN KEY('|| ( select findattname(namespace,tablename,'u') ) "
|
||||
+ "||') "
|
||||
+ "REFERENCES '||\n"
|
||||
+ " (select p.relname from pg_class p where p.oid=c.confrelid ) || '('|| ( select\n"
|
||||
+ " findattname(namespace,tablename,'u') ) ||')' end as f\n"
|
||||
+ " from pg_constraint c\n"
|
||||
+ " where contype in('u','c','f','p') and conrelid=(\n"
|
||||
+ " select oid from pg_class where relname=tablename and relnamespace =(\n"
|
||||
+ " select oid from pg_namespace where nspname = namespace\n"
|
||||
+ " )\n"
|
||||
+ " )\n"
|
||||
+ " ) as t\n"
|
||||
+ " ) ,',' || chr(13)||chr(10) ) || chr(13)||chr(10) ||' ); ';\n"
|
||||
+ "\n"
|
||||
+ " -- indexs\n"
|
||||
+ " -- CREATE UNIQUE INDEX pg_language_oid_index ON pg_language USING btree (oid); -- table "
|
||||
+ "pg_language\n"
|
||||
+ "\n"
|
||||
+ "\n"
|
||||
+ " --\n"
|
||||
+ " /** **/\n"
|
||||
+ " --- 获取非约束索引 column\n"
|
||||
+ " -- CREATE UNIQUE INDEX pg_language_oid_index ON pg_language USING btree (oid); -- table "
|
||||
+ "pg_language\n"
|
||||
+ " tableScript:= tableScript || chr(13)||chr(10) || chr(13)||chr(10) || array_to_string(\n"
|
||||
+ " array(\n"
|
||||
+ " select 'CREATE INDEX ' || indexrelname || ' ON ' || tablename || ' USING btree '|| '(' || "
|
||||
+ "attname "
|
||||
+ "|| "
|
||||
+ "');' from (\n"
|
||||
+ " SELECT\n"
|
||||
+ " i.relname AS indexrelname , x.indkey,\n"
|
||||
+ "\n"
|
||||
+ " ( select array_to_string (\n"
|
||||
+ " array(\n"
|
||||
+ " select a.attname from pg_attribute a where attrelid=c.oid and a.attnum in ( select unnest(x"
|
||||
+ ".indkey) )\n"
|
||||
+ "\n"
|
||||
+ " )\n"
|
||||
+ " ,',' ) )as attname\n"
|
||||
+ "\n"
|
||||
+ " FROM pg_class c\n"
|
||||
+ " JOIN pg_index x ON c.oid = x.indrelid\n"
|
||||
+ " JOIN pg_class i ON i.oid = x.indexrelid\n"
|
||||
+ " LEFT JOIN pg_namespace n ON n.oid = c.relnamespace\n"
|
||||
+ " WHERE c.relname=tablename and i.relname not in\n"
|
||||
+ " ( select constraint_name from information_schema.key_column_usage where table_name=tablename )\n"
|
||||
+ " )as t\n"
|
||||
+ " ) ,','|| chr(13)||chr(10));\n"
|
||||
+ "\n"
|
||||
+ "\n"
|
||||
+ " -- COMMENT COMMENT ON COLUMN sys_activity.id IS '主键';\n"
|
||||
+ " tableScript:= tableScript || chr(13)||chr(10) || chr(13)||chr(10) || array_to_string(\n"
|
||||
+ " array(\n"
|
||||
+ " SELECT 'COMMENT ON COLUMN ' || 'namespace.tablename' || '.' || a.attname ||' IS '|| ''''|| d.description "
|
||||
+ "||''''\n"
|
||||
+ " FROM pg_class c\n"
|
||||
+ " JOIN pg_description d ON c.oid=d.objoid\n"
|
||||
+ " JOIN pg_attribute a ON c.oid = a.attrelid\n"
|
||||
+ " WHERE c.relname=tablename\n"
|
||||
+ " AND a.attnum = d.objsubid),';'|| chr(13)||chr(10)) ;\n"
|
||||
+ "\n"
|
||||
+ " return tableScript;\n"
|
||||
+ "\n"
|
||||
+ " end\n"
|
||||
+ " $BODY$ LANGUAGE plpgsql;\n"
|
||||
+ "\n"
|
||||
+ " CREATE OR REPLACE FUNCTION findattname(namespace character varying, tablename character "
|
||||
+ "varying, "
|
||||
+ "ctype"
|
||||
+ " character\n"
|
||||
+ " varying)\n"
|
||||
+ " RETURNS character varying as $BODY$\n"
|
||||
+ "\n"
|
||||
+ " declare\n"
|
||||
+ " tt oid ;\n"
|
||||
+ " aname character varying default '';\n"
|
||||
+ "\n"
|
||||
+ " begin\n"
|
||||
+ " tt := oid from pg_class where relname= tablename and relnamespace =(select oid from "
|
||||
+ "pg_namespace "
|
||||
+ "where\n"
|
||||
+ " nspname=namespace) ;\n"
|
||||
+ " aname:= array_to_string(\n"
|
||||
+ " array(\n"
|
||||
+ " select a.attname from pg_attribute a\n"
|
||||
+ " where a.attrelid=tt and a.attnum in (\n"
|
||||
+ " select unnest(conkey) from pg_constraint c where contype=ctype\n"
|
||||
+ " and conrelid=tt and array_to_string(conkey,',') is not null\n"
|
||||
+ " )\n"
|
||||
+ " ),',');\n"
|
||||
+ "\n"
|
||||
+ " return aname;\n"
|
||||
+ " end\n"
|
||||
+ " $BODY$ LANGUAGE plpgsql";
|
||||
"""
|
||||
DROP TYPE IF EXISTS public.tabledefs CASCADE;
|
||||
CREATE TYPE public.tabledefs AS ENUM ('PKEY_INTERNAL','PKEY_EXTERNAL','FKEYS_INTERNAL', 'FKEYS_EXTERNAL', 'COMMENTS', 'FKEYS_NONE', 'INCLUDE_TRIGGERS', 'NO_TRIGGERS');
|
||||
|
||||
-- SELECT * FROM public.pg_get_coldef('sample','orders','id');
|
||||
-- DROP FUNCTION public.pg_get_coldef(text,text,text,boolean);
|
||||
CREATE OR REPLACE FUNCTION public.pg_get_coldef(
|
||||
in_schema text,
|
||||
in_table text,
|
||||
in_column text,
|
||||
oldway boolean default False
|
||||
)
|
||||
RETURNS text
|
||||
LANGUAGE plpgsql VOLATILE
|
||||
AS
|
||||
$$
|
||||
DECLARE
|
||||
v_coldef text;
|
||||
v_dt1 text;
|
||||
v_dt2 text;
|
||||
v_dt3 text;
|
||||
v_nullable boolean;
|
||||
v_position int;
|
||||
v_identity text;
|
||||
v_generated text;
|
||||
v_hasdflt boolean;
|
||||
v_dfltexpr text;
|
||||
|
||||
BEGIN
|
||||
IF oldway THEN
|
||||
SELECT pg_catalog.format_type(a.atttypid, a.atttypmod) INTO v_coldef FROM pg_namespace n, pg_class c, pg_attribute a, pg_type t
|
||||
WHERE n.nspname = in_schema AND n.oid = c.relnamespace AND c.relname = in_table AND a.attname = in_column and a.attnum > 0 AND a.attrelid = c.oid AND a.atttypid = t.oid ORDER BY a.attnum;
|
||||
-- RAISE NOTICE 'DEBUG: oldway=%',v_coldef;
|
||||
ELSE
|
||||
-- a.attrelid::regclass::text, a.attname
|
||||
SELECT CASE WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS (SELECT FROM pg_attrdef ad WHERE ad.adrelid = a.attrelid AND ad.adnum = a.attnum AND
|
||||
pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || (pg_get_serial_sequence (a.attrelid::regclass::text, a.attname))::regclass || '''::regclass)') THEN CASE a.atttypid
|
||||
WHEN 'int'::regtype THEN 'serial' WHEN 'int8'::regtype THEN 'bigserial' WHEN 'int2'::regtype THEN 'smallserial' END ELSE format_type(a.atttypid, a.atttypmod) END AS data_type
|
||||
INTO v_coldef FROM pg_namespace n, pg_class c, pg_attribute a, pg_type t
|
||||
WHERE n.nspname = in_schema AND n.oid = c.relnamespace AND c.relname = in_table AND a.attname = in_column and a.attnum > 0 AND a.attrelid = c.oid AND a.atttypid = t.oid ORDER BY a.attnum;
|
||||
-- RAISE NOTICE 'DEBUG: newway=%',v_coldef;
|
||||
|
||||
-- Issue#24: not implemented yet
|
||||
-- might replace with this below to do more detailed parsing...
|
||||
-- SELECT a.atttypid::regtype AS dt1, format_type(a.atttypid, a.atttypmod) as dt2, t.typname as dt3, CASE WHEN not(a.attnotnull) THEN True ELSE False END AS nullable,
|
||||
-- a.attnum, a.attidentity, a.attgenerated, a.atthasdef, pg_get_expr(ad.adbin, ad.adrelid) dfltexpr
|
||||
-- INTO v_dt1, v_dt2, v_dt3, v_nullable, v_position, v_identity, v_generated, v_hasdflt, v_dfltexpr
|
||||
-- FROM pg_attribute a JOIN pg_class c ON (a.attrelid = c.oid) JOIN pg_type t ON (a.atttypid = t.oid) LEFT JOIN pg_attrdef ad ON (a.attrelid = ad.adrelid AND a.attnum = ad.adnum)
|
||||
-- WHERE c.relkind in ('r','p') AND a.attnum > 0 AND NOT a.attisdropped AND c.relnamespace::regnamespace::text = in_schema AND c.relname = in_table AND a.attname = in_column;
|
||||
-- RAISE NOTICE 'schema=% table=% column=% dt1=% dt2=% dt3=% nullable=% pos=% identity=% generated=% HasDefault=% DeftExpr=%', in_schema, in_table, in_column, v_dt1,v_dt2,v_dt3,v_nullable,v_position,v_identity,v_generated,v_hasdflt,v_dfltexpr;
|
||||
END IF;
|
||||
RETURN v_coldef;
|
||||
END;
|
||||
$$;
|
||||
|
||||
-- SELECT * FROM public.pg_get_tabledef('sample', 'address', false);
|
||||
DROP FUNCTION IF EXISTS public.pg_get_tabledef(character varying,character varying,boolean,tabledefs[]);
|
||||
CREATE OR REPLACE FUNCTION public.pg_get_tabledef(
|
||||
in_schema varchar,
|
||||
in_table varchar,
|
||||
_verbose boolean,
|
||||
VARIADIC arr public.tabledefs[] DEFAULT '{}':: public.tabledefs[]
|
||||
)
|
||||
RETURNS text
|
||||
LANGUAGE plpgsql VOLATILE
|
||||
AS
|
||||
$$
|
||||
DECLARE
|
||||
v_qualified text := '';
|
||||
v_table_ddl text;
|
||||
v_table_oid int;
|
||||
v_colrec record;
|
||||
v_constraintrec record;
|
||||
v_trigrec record;
|
||||
v_indexrec record;
|
||||
v_rec record;
|
||||
v_constraint_name text;
|
||||
v_constraint_def text;
|
||||
v_pkey_def text := '';
|
||||
v_fkey_def text := '';
|
||||
v_fkey_defs text := '';
|
||||
v_trigger text := '';
|
||||
v_partition_key text := '';
|
||||
v_partbound text;
|
||||
v_parent text;
|
||||
v_parent_schema text;
|
||||
v_persist text;
|
||||
v_temp text := '';
|
||||
v_relopts text;
|
||||
v_tablespace text;
|
||||
v_pgversion int;
|
||||
bSerial boolean;
|
||||
bPartition boolean;
|
||||
bInheritance boolean;
|
||||
bRelispartition boolean;
|
||||
constraintarr text[] := '{}';
|
||||
constraintelement text;
|
||||
bSkip boolean;
|
||||
bVerbose boolean := False;
|
||||
v_cnt1 integer;
|
||||
v_cnt2 integer;
|
||||
v_src_path_old text := '';
|
||||
v_src_path_new text := '';
|
||||
|
||||
-- assume defaults for ENUMs at the getgo
|
||||
pkcnt int := 0;
|
||||
fkcnt int := 0;
|
||||
trigcnt int := 0;
|
||||
cmtcnt int := 0;
|
||||
pktype public.tabledefs := 'PKEY_INTERNAL';
|
||||
fktype public.tabledefs := 'FKEYS_INTERNAL';
|
||||
trigtype public.tabledefs := 'NO_TRIGGERS';
|
||||
arglen integer;
|
||||
vargs text;
|
||||
avarg public.tabledefs;
|
||||
|
||||
-- exception variables
|
||||
v_ret text;
|
||||
v_diag1 text;
|
||||
v_diag2 text;
|
||||
v_diag3 text;
|
||||
v_diag4 text;
|
||||
v_diag5 text;
|
||||
v_diag6 text;
|
||||
|
||||
BEGIN
|
||||
SET client_min_messages = 'notice';
|
||||
IF _verbose THEN bVerbose = True; END IF;
|
||||
|
||||
-- v17 fix: handle case-sensitive
|
||||
-- v_qualified = in_schema || '.' || in_table;
|
||||
|
||||
arglen := array_length($4, 1);
|
||||
IF arglen IS NULL THEN
|
||||
-- nothing to do, so assume defaults
|
||||
NULL;
|
||||
ELSE
|
||||
-- loop thru args
|
||||
-- IF 'NO_TRIGGERS' = ANY ($4)
|
||||
-- select array_to_string($4, ',', '***') INTO vargs;
|
||||
IF bVerbose THEN RAISE NOTICE 'arguments=%', $4; END IF;
|
||||
FOREACH avarg IN ARRAY $4 LOOP
|
||||
IF bVerbose THEN RAISE INFO 'arg=%', avarg; END IF;
|
||||
IF avarg = 'FKEYS_INTERNAL' OR avarg = 'FKEYS_EXTERNAL' OR avarg = 'FKEYS_NONE' THEN
|
||||
fkcnt = fkcnt + 1;
|
||||
fktype = avarg;
|
||||
ELSEIF avarg = 'INCLUDE_TRIGGERS' OR avarg = 'NO_TRIGGERS' THEN
|
||||
trigcnt = trigcnt + 1;
|
||||
trigtype = avarg;
|
||||
ELSEIF avarg = 'PKEY_EXTERNAL' THEN
|
||||
pkcnt = pkcnt + 1;
|
||||
pktype = avarg;
|
||||
ELSEIF avarg = 'COMMENTS' THEN
|
||||
cmtcnt = cmtcnt + 1;
|
||||
|
||||
END IF;
|
||||
END LOOP;
|
||||
IF fkcnt > 1 THEN
|
||||
RAISE WARNING 'Only one foreign key option can be provided. You provided %', fkcnt;
|
||||
RETURN '';
|
||||
ELSEIF trigcnt > 1 THEN
|
||||
RAISE WARNING 'Only one trigger option can be provided. You provided %', trigcnt;
|
||||
RETURN '';
|
||||
ELSEIF pkcnt > 1 THEN
|
||||
RAISE WARNING 'Only one pkey option can be provided. You provided %', pkcnt;
|
||||
RETURN '';
|
||||
ELSEIF cmtcnt > 1 THEN
|
||||
RAISE WARNING 'Only one comments option can be provided. You provided %', cmtcnt;
|
||||
RETURN '';
|
||||
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
SELECT c.oid, (select setting from pg_settings where name = 'server_version_num') INTO v_table_oid, v_pgversion FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
|
||||
WHERE c.relkind in ('r','p') AND c.relname = in_table AND n.nspname = in_schema;
|
||||
|
||||
-- set search_path = public before we do anything to force explicit schema qualification, but don't forget to set it back before exiting...
|
||||
SELECT setting INTO v_src_path_old FROM pg_settings WHERE name = 'search_path';
|
||||
|
||||
SELECT REPLACE(REPLACE(setting, '"$user"', '$user'), '$user', '"$user"') INTO v_src_path_old
|
||||
FROM pg_settings
|
||||
WHERE name = 'search_path';
|
||||
-- RAISE INFO 'DEBUG tableddl: saving old search_path: ***%***', v_src_path_old;
|
||||
EXECUTE 'SET search_path = "public"';
|
||||
SELECT setting INTO v_src_path_new FROM pg_settings WHERE name = 'search_path';
|
||||
-- RAISE INFO 'DEBUG tableddl: using new search path=***%***', v_src_path_new;
|
||||
|
||||
-- throw an error if table was not found
|
||||
IF (v_table_oid IS NULL) THEN
|
||||
RAISE EXCEPTION 'table does not exist';
|
||||
END IF;
|
||||
|
||||
-- get user-defined tablespaces if applicable
|
||||
SELECT tablespace INTO v_temp FROM pg_tables WHERE schemaname = in_schema and tablename = in_table and tablespace IS NOT NULL;
|
||||
IF v_temp IS NULL THEN
|
||||
v_tablespace := 'TABLESPACE pg_default';
|
||||
ELSE
|
||||
v_tablespace := 'TABLESPACE ' || v_temp;
|
||||
END IF;
|
||||
|
||||
-- also see if there are any SET commands for this table, ie, autovacuum_enabled=off, fillfactor=70
|
||||
WITH relopts AS (SELECT unnest(c.reloptions) relopts FROM pg_class c, pg_namespace n WHERE n.nspname = in_schema and n.oid = c.relnamespace and c.relname = in_table)
|
||||
SELECT string_agg(r.relopts, ', ') as relopts INTO v_temp from relopts r;
|
||||
IF v_temp IS NULL THEN
|
||||
v_relopts := '';
|
||||
ELSE
|
||||
v_relopts := ' WITH (' || v_temp || ')';
|
||||
END IF;
|
||||
|
||||
-- -----------------------------------------------------------------------------------
|
||||
-- Create table defs for partitions/children using inheritance or declarative methods.
|
||||
-- inheritance: pg_class.relkind = 'r' pg_class.relispartition=false pg_class.relpartbound is NULL
|
||||
-- declarative: pg_class.relkind = 'r' pg_class.relispartition=true pg_class.relpartbound is NOT NULL
|
||||
-- -----------------------------------------------------------------------------------
|
||||
v_partbound := '';
|
||||
bPartition := False;
|
||||
bInheritance := False;
|
||||
IF v_pgversion < 100000 THEN
|
||||
-- Issue#11: handle parent schema
|
||||
SELECT c2.relname parent, c2.relnamespace::regnamespace INTO v_parent, v_parent_schema from pg_class c1, pg_namespace n, pg_inherits i, pg_class c2
|
||||
WHERE n.nspname = in_schema and n.oid = c1.relnamespace and c1.relname = in_table and c1.oid = i.inhrelid and i.inhparent = c2.oid and c1.relkind = 'r';
|
||||
IF (v_parent IS NOT NULL) THEN
|
||||
bPartition := True;
|
||||
bInheritance := True;
|
||||
END IF;
|
||||
ELSE
|
||||
-- Issue#11: handle parent schema
|
||||
SELECT c2.relname parent, c1.relispartition, pg_get_expr(c1.relpartbound, c1.oid, true), c2.relnamespace::regnamespace INTO v_parent, bRelispartition, v_partbound, v_parent_schema from pg_class c1, pg_namespace n, pg_inherits i, pg_class c2
|
||||
WHERE n.nspname = in_schema and n.oid = c1.relnamespace and c1.relname = in_table and c1.oid = i.inhrelid and i.inhparent = c2.oid and c1.relkind = 'r';
|
||||
IF (v_parent IS NOT NULL) THEN
|
||||
bPartition := True;
|
||||
IF bRelispartition THEN
|
||||
bInheritance := False;
|
||||
ELSE
|
||||
bInheritance := True;
|
||||
END IF;
|
||||
END IF;
|
||||
END IF;
|
||||
IF bPartition THEN
|
||||
--Issue#17 fix for case-sensitive tables
|
||||
-- SELECT count(*) INTO v_cnt1 FROM information_schema.tables t WHERE EXISTS (SELECT REGEXP_MATCHES(s.table_name, '([A-Z]+)','g') FROM information_schema.tables s
|
||||
-- WHERE t.table_schema=s.table_schema AND t.table_name=s.table_name AND t.table_schema = quote_ident(in_schema) AND t.table_name = quote_ident(in_table) AND t.table_type = 'BASE TABLE');
|
||||
SELECT count(*) INTO v_cnt1 FROM information_schema.tables t WHERE EXISTS (SELECT REGEXP_MATCHES(s.table_name, '([A-Z]+)','g') FROM information_schema.tables s
|
||||
WHERE t.table_schema=s.table_schema AND t.table_name=s.table_name AND t.table_schema = in_schema AND t.table_name = in_table AND t.table_type = 'BASE TABLE');
|
||||
|
||||
--Issue#19 put double-quotes around SQL keyword column names
|
||||
-- Issue#121: fix keyword lookup for table name not column name that does not apply here
|
||||
-- SELECT COUNT(*) INTO v_cnt2 FROM pg_get_keywords() WHERE word = v_colrec.column_name AND catcode = 'R';
|
||||
SELECT COUNT(*) INTO v_cnt2 FROM pg_get_keywords() WHERE word = in_table AND catcode = 'R';
|
||||
|
||||
IF bInheritance THEN
|
||||
-- inheritance-based
|
||||
IF v_cnt1 > 0 OR v_cnt2 > 0 THEN
|
||||
v_table_ddl := 'CREATE TABLE ' || in_schema || '."' || in_table || '"( '|| E'\\n';
|
||||
ELSE
|
||||
v_table_ddl := 'CREATE TABLE ' || in_schema || '.' || in_table || '( '|| E'\\n';
|
||||
END IF;
|
||||
|
||||
-- Jump to constraints section to add the check constraints
|
||||
ELSE
|
||||
-- declarative-based
|
||||
IF v_relopts <> '' THEN
|
||||
IF v_cnt1 > 0 OR v_cnt2 > 0 THEN
|
||||
v_table_ddl := 'CREATE TABLE ' || in_schema || '."' || in_table || '" PARTITION OF ' || in_schema || '.' || v_parent || ' ' || v_partbound || v_relopts || ' ' || v_tablespace || '; ' || E'\\n';
|
||||
ELSE
|
||||
v_table_ddl := 'CREATE TABLE ' || in_schema || '.' || in_table || ' PARTITION OF ' || in_schema || '.' || v_parent || ' ' || v_partbound || v_relopts || ' ' || v_tablespace || '; ' || E'\\n';
|
||||
END IF;
|
||||
ELSE
|
||||
IF v_cnt1 > 0 OR v_cnt2 > 0 THEN
|
||||
v_table_ddl := 'CREATE TABLE ' || in_schema || '."' || in_table || '" PARTITION OF ' || in_schema || '.' || v_parent || ' ' || v_partbound || ' ' || v_tablespace || '; ' || E'\\n';
|
||||
ELSE
|
||||
v_table_ddl := 'CREATE TABLE ' || in_schema || '.' || in_table || ' PARTITION OF ' || in_schema || '.' || v_parent || ' ' || v_partbound || ' ' || v_tablespace || '; ' || E'\\n';
|
||||
END IF;
|
||||
END IF;
|
||||
-- Jump to constraints and index section to add the check constraints and indexes and perhaps FKeys
|
||||
END IF;
|
||||
END IF;
|
||||
IF bVerbose THEN RAISE INFO '(1)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
IF NOT bPartition THEN
|
||||
-- see if this is unlogged or temporary table
|
||||
select c.relpersistence into v_persist from pg_class c, pg_namespace n where n.nspname = in_schema and n.oid = c.relnamespace and c.relname = in_table and c.relkind = 'r';
|
||||
IF v_persist = 'u' THEN
|
||||
v_temp := 'UNLOGGED';
|
||||
ELSIF v_persist = 't' THEN
|
||||
v_temp := 'TEMPORARY';
|
||||
ELSE
|
||||
v_temp := '';
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- start the create definition for regular tables unless we are in progress creating an inheritance-based child table
|
||||
IF NOT bPartition THEN
|
||||
--Issue#17 fix for case-sensitive tables
|
||||
-- SELECT count(*) INTO v_cnt1 FROM information_schema.tables t WHERE EXISTS (SELECT REGEXP_MATCHES(s.table_name, '([A-Z]+)','g') FROM information_schema.tables s
|
||||
-- WHERE t.table_schema=s.table_schema AND t.table_name=s.table_name AND t.table_schema = quote_ident(in_schema) AND t.table_name = quote_ident(in_table) AND t.table_type = 'BASE TABLE');
|
||||
SELECT count(*) INTO v_cnt1 FROM information_schema.tables t WHERE EXISTS (SELECT REGEXP_MATCHES(s.table_name, '([A-Z]+)','g') FROM information_schema.tables s
|
||||
WHERE t.table_schema=s.table_schema AND t.table_name=s.table_name AND t.table_schema = in_schema AND t.table_name = in_table AND t.table_type = 'BASE TABLE');
|
||||
IF v_cnt1 > 0 THEN
|
||||
v_table_ddl := 'CREATE ' || v_temp || ' TABLE ' || in_schema || '."' || in_table || '" (' || E'\\n';
|
||||
ELSE
|
||||
v_table_ddl := 'CREATE ' || v_temp || ' TABLE ' || in_schema || '.' || in_table || ' (' || E'\\n';
|
||||
END IF;
|
||||
END IF;
|
||||
-- RAISE INFO 'DEBUG2: tabledef so far: %', v_table_ddl;
|
||||
-- define all of the columns in the table unless we are in progress creating an inheritance-based child table
|
||||
IF NOT bPartition THEN
|
||||
FOR v_colrec IN
|
||||
SELECT c.column_name, c.data_type, c.udt_name, c.udt_schema, c.character_maximum_length, c.is_nullable, c.column_default, c.numeric_precision, c.numeric_scale, c.is_identity, c.identity_generation, c.is_generated, c.generation_expression
|
||||
FROM information_schema.columns c WHERE (table_schema, table_name) = (in_schema, in_table) ORDER BY ordinal_position
|
||||
LOOP
|
||||
IF bVerbose THEN RAISE INFO '(col loop) name=% type=% udt_name=% default=% is_generated=% gen_expr=%', v_colrec.column_name, v_colrec.data_type, v_colrec.udt_name, v_colrec.column_default, v_colrec.is_generated, v_colrec.generation_expression; END IF;
|
||||
|
||||
-- v17 fix: handle case-sensitive for pg_get_serial_sequence that requires SQL Identifier handling
|
||||
-- SELECT CASE WHEN pg_get_serial_sequence(v_qualified, v_colrec.column_name) IS NOT NULL THEN True ELSE False END into bSerial;
|
||||
SELECT CASE WHEN pg_get_serial_sequence(quote_ident(in_schema) || '.' || quote_ident(in_table), v_colrec.column_name) IS NOT NULL THEN True ELSE False END into bSerial;
|
||||
IF bVerbose THEN
|
||||
-- v17 fix: handle case-sensitive for pg_get_serial_sequence that requires SQL Identifier handling
|
||||
-- SELECT pg_get_serial_sequence(v_qualified, v_colrec.column_name) into v_temp;
|
||||
SELECT pg_get_serial_sequence(quote_ident(in_schema) || '.' || quote_ident(in_table), v_colrec.column_name) into v_temp;
|
||||
IF v_temp IS NULL THEN v_temp = 'NA'; END IF;
|
||||
SELECT public.pg_get_coldef(in_schema, in_table,v_colrec.column_name) INTO v_diag1;
|
||||
RAISE NOTICE 'DEBUG table: % Column: % datatype: % Serial=% serialval=% coldef=%', v_qualified, v_colrec.column_name, v_colrec.data_type, bSerial, v_temp, v_diag1;
|
||||
RAISE NOTICE 'DEBUG tabledef: %', v_table_ddl;
|
||||
END IF;
|
||||
|
||||
--Issue#17 put double-quotes around case-sensitive column names
|
||||
SELECT COUNT(*) INTO v_cnt1 FROM information_schema.columns t WHERE EXISTS (SELECT REGEXP_MATCHES(s.column_name, '([A-Z]+)','g') FROM information_schema.columns s
|
||||
WHERE t.table_schema=s.table_schema and t.table_name=s.table_name and t.column_name=s.column_name AND t.table_schema = quote_ident(in_schema) AND column_name = v_colrec.column_name);
|
||||
|
||||
--Issue#19 put double-quotes around SQL keyword column names
|
||||
SELECT COUNT(*) INTO v_cnt2 FROM pg_get_keywords() WHERE word = v_colrec.column_name AND catcode = 'R';
|
||||
|
||||
IF v_cnt1 > 0 OR v_cnt2 > 0 THEN
|
||||
v_table_ddl := v_table_ddl || ' "' || v_colrec.column_name || '" ';
|
||||
ELSE
|
||||
v_table_ddl := v_table_ddl || ' ' || v_colrec.column_name || ' ';
|
||||
END IF;
|
||||
|
||||
-- Issue#23: Handle autogenerated columns and rewrite as a simpler IF THEN ELSE branch instead of a much more complex embedded CASE STATEMENT
|
||||
IF v_colrec.is_generated = 'ALWAYS' and v_colrec.generation_expression IS NOT NULL THEN
|
||||
-- searchable tsvector GENERATED ALWAYS AS (to_tsvector('simple'::regconfig, COALESCE(translate(email, '@.-'::citext, ' '::text), ''::text)) ) STORED
|
||||
v_temp = v_colrec.data_type || ' GENERATED ALWAYS AS (' || v_colrec.generation_expression || ') STORED ';
|
||||
ELSEIF v_colrec.udt_name in ('geometry', 'box2d', 'box2df', 'box3d', 'geography', 'geometry_dump', 'gidx', 'spheroid', 'valid_detail') THEN
|
||||
v_temp = v_colrec.udt_name;
|
||||
ELSEIF v_colrec.data_type = 'USER-DEFINED' THEN
|
||||
v_temp = v_colrec.udt_schema || '.' || v_colrec.udt_name;
|
||||
ELSEIF v_colrec.data_type = 'ARRAY' THEN
|
||||
-- Issue#6 fix: handle arrays
|
||||
v_temp = public.pg_get_coldef(in_schema, in_table,v_colrec.column_name);
|
||||
-- v17 fix: handle case-sensitive for pg_get_serial_sequence that requires SQL Identifier handling
|
||||
-- WHEN pg_get_serial_sequence(v_qualified, v_colrec.column_name) IS NOT NULL
|
||||
ELSEIF pg_get_serial_sequence(quote_ident(in_schema) || '.' || quote_ident(in_table), v_colrec.column_name) IS NOT NULL THEN
|
||||
-- Issue#8 fix: handle serial. Note: NOT NULL is implied so no need to declare it explicitly
|
||||
v_temp = public.pg_get_coldef(in_schema, in_table,v_colrec.column_name);
|
||||
ELSE
|
||||
v_temp = v_colrec.data_type;
|
||||
END IF;
|
||||
-- RAISE NOTICE 'column def1=%', v_temp;
|
||||
|
||||
-- handle IDENTITY columns
|
||||
IF v_colrec.is_identity = 'YES' THEN
|
||||
IF v_colrec.identity_generation = 'ALWAYS' THEN
|
||||
v_temp = v_temp || ' GENERATED ALWAYS AS IDENTITY NOT NULL';
|
||||
ELSE
|
||||
v_temp = v_temp || ' GENERATED BY DEFAULT AS IDENTITY NOT NULL';
|
||||
END IF;
|
||||
ELSEIF v_colrec.character_maximum_length IS NOT NULL THEN
|
||||
v_temp = v_temp || ('(' || v_colrec.character_maximum_length || ')');
|
||||
ELSEIF v_colrec.numeric_precision > 0 AND v_colrec.numeric_scale > 0 THEN
|
||||
v_temp = v_temp || '(' || v_colrec.numeric_precision || ',' || v_colrec.numeric_scale || ')';
|
||||
END IF;
|
||||
|
||||
-- Handle NULL/NOT NULL
|
||||
IF bSerial THEN
|
||||
v_temp = v_temp || ' NOT NULL';
|
||||
ELSEIF v_colrec.is_nullable = 'NO' THEN
|
||||
v_temp = v_temp || ' NOT NULL';
|
||||
ELSEIF v_colrec.is_nullable = 'YES' THEN
|
||||
v_temp = v_temp || ' NULL';
|
||||
END IF;
|
||||
|
||||
-- Handle defaults
|
||||
IF v_colrec.column_default IS NOT null AND NOT bSerial THEN
|
||||
-- RAISE INFO 'Setting default for column, %', v_colrec.column_name;
|
||||
v_temp = v_temp || (' DEFAULT ' || v_colrec.column_default);
|
||||
END IF;
|
||||
v_temp = v_temp || ',' || E'\\n';
|
||||
-- RAISE NOTICE 'column def2=%', v_temp;
|
||||
v_table_ddl := v_table_ddl || v_temp;
|
||||
-- RAISE NOTICE 'tabledef=%', v_table_ddl;
|
||||
|
||||
END LOOP;
|
||||
END IF;
|
||||
IF bVerbose THEN RAISE INFO '(2)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- define all the constraints: conparentid does not exist pre PGv11
|
||||
IF v_pgversion < 110000 THEN
|
||||
FOR v_constraintrec IN
|
||||
SELECT con.conname as constraint_name, con.contype as constraint_type,
|
||||
CASE
|
||||
WHEN con.contype = 'p' THEN 1 -- primary key constraint
|
||||
WHEN con.contype = 'u' THEN 2 -- unique constraint
|
||||
WHEN con.contype = 'f' THEN 3 -- foreign key constraint
|
||||
WHEN con.contype = 'c' THEN 4
|
||||
ELSE 5
|
||||
END as type_rank,
|
||||
pg_get_constraintdef(con.oid) as constraint_definition
|
||||
FROM pg_catalog.pg_constraint con JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
|
||||
WHERE nsp.nspname = in_schema AND rel.relname = in_table ORDER BY type_rank
|
||||
LOOP
|
||||
v_constraint_name := v_constraintrec.constraint_name;
|
||||
v_constraint_def := v_constraintrec.constraint_definition;
|
||||
IF v_constraintrec.type_rank = 1 THEN
|
||||
IF pkcnt = 0 OR pktype = 'PKEY_INTERNAL' THEN
|
||||
-- internal def
|
||||
v_constraint_name := v_constraintrec.constraint_name;
|
||||
v_constraint_def := v_constraintrec.constraint_definition;
|
||||
v_table_ddl := v_table_ddl || ' ' -- note: two char spacer to start, to indent the column
|
||||
|| 'CONSTRAINT' || ' '
|
||||
|| v_constraint_name || ' '
|
||||
|| v_constraint_def
|
||||
|| ',' || E'\\n';
|
||||
ELSE
|
||||
-- Issue#16 handle external PG def
|
||||
SELECT 'ALTER TABLE ONLY ' || in_schema || '.' || c.relname || ' ADD CONSTRAINT ' || r.conname || ' ' || pg_catalog.pg_get_constraintdef(r.oid, true) || ';' INTO v_pkey_def
|
||||
FROM pg_catalog.pg_constraint r, pg_class c, pg_namespace n where r.conrelid = c.oid and r.contype = 'p' and n.oid = r.connamespace and n.nspname = in_schema AND c.relname = in_table and r.conname = v_constraint_name;
|
||||
END IF;
|
||||
IF bPartition THEN
|
||||
continue;
|
||||
END IF;
|
||||
ELSIF v_constraintrec.type_rank = 3 THEN
|
||||
-- handle foreign key constraints
|
||||
--Issue#22 fix: added FKEY_NONE check
|
||||
IF fktype = 'FKEYS_NONE' THEN
|
||||
-- skip
|
||||
continue;
|
||||
ELSIF fkcnt = 0 OR fktype = 'FKEYS_INTERNAL' THEN
|
||||
-- internal def
|
||||
v_table_ddl := v_table_ddl || ' ' -- note: two char spacer to start, to indent the column
|
||||
|| 'CONSTRAINT' || ' '
|
||||
|| v_constraint_name || ' '
|
||||
|| v_constraint_def
|
||||
|| ',' || E'\\n';
|
||||
ELSE
|
||||
-- external def
|
||||
SELECT 'ALTER TABLE ONLY ' || n.nspname || '.' || c2.relname || ' ADD CONSTRAINT ' || r.conname || ' ' || pg_catalog.pg_get_constraintdef(r.oid, true) || ';' INTO v_fkey_def
|
||||
FROM pg_constraint r, pg_class c1, pg_namespace n, pg_class c2 where r.conrelid = c1.oid and r.contype = 'f' and n.nspname = in_schema and n.oid = r.connamespace and r.conrelid = c2.oid and c2.relname = in_table;
|
||||
v_fkey_defs = v_fkey_defs || v_fkey_def || E'\\n';
|
||||
END IF;
|
||||
ELSE
|
||||
-- handle all other constraints besides PKEY and FKEYS as internal defs by default
|
||||
v_table_ddl := v_table_ddl || ' ' -- note: two char spacer to start, to indent the column
|
||||
|| 'CONSTRAINT' || ' '
|
||||
|| v_constraint_name || ' '
|
||||
|| v_constraint_def
|
||||
|| ',' || E'\\n';
|
||||
END IF;
|
||||
if bVerbose THEN RAISE INFO 'DEBUG4: constraint name=% constraint_def=%', v_constraint_name,v_constraint_def; END IF;
|
||||
constraintarr := constraintarr || v_constraintrec.constraint_name:: text;
|
||||
|
||||
END LOOP;
|
||||
ELSE
|
||||
-- handle PG versions 11 and up
|
||||
-- Issue#20: Fix logic for external PKEY and FKEYS
|
||||
FOR v_constraintrec IN
|
||||
SELECT con.conname as constraint_name, con.contype as constraint_type,
|
||||
CASE
|
||||
WHEN con.contype = 'p' THEN 1 -- primary key constraint
|
||||
WHEN con.contype = 'u' THEN 2 -- unique constraint
|
||||
WHEN con.contype = 'f' THEN 3 -- foreign key constraint
|
||||
WHEN con.contype = 'c' THEN 4
|
||||
ELSE 5
|
||||
END as type_rank,
|
||||
pg_get_constraintdef(con.oid) as constraint_definition
|
||||
FROM pg_catalog.pg_constraint con JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
|
||||
WHERE nsp.nspname = in_schema AND rel.relname = in_table
|
||||
--Issue#13 added this condition:
|
||||
AND con.conparentid = 0
|
||||
ORDER BY type_rank
|
||||
LOOP
|
||||
v_constraint_name := v_constraintrec.constraint_name;
|
||||
v_constraint_def := v_constraintrec.constraint_definition;
|
||||
IF v_constraintrec.type_rank = 1 THEN
|
||||
IF pkcnt = 0 OR pktype = 'PKEY_INTERNAL' THEN
|
||||
-- internal def
|
||||
v_constraint_name := v_constraintrec.constraint_name;
|
||||
v_constraint_def := v_constraintrec.constraint_definition;
|
||||
v_table_ddl := v_table_ddl || ' ' -- note: two char spacer to start, to indent the column
|
||||
|| 'CONSTRAINT' || ' '
|
||||
|| v_constraint_name || ' '
|
||||
|| v_constraint_def
|
||||
|| ',' || E'\\n';
|
||||
ELSE
|
||||
-- Issue#16 handle external PG def
|
||||
SELECT 'ALTER TABLE ONLY ' || in_schema || '.' || c.relname || ' ADD CONSTRAINT ' || r.conname || ' ' || pg_catalog.pg_get_constraintdef(r.oid, true) || ';' INTO v_pkey_def
|
||||
FROM pg_catalog.pg_constraint r, pg_class c, pg_namespace n where r.conrelid = c.oid and r.contype = 'p' and n.oid = r.connamespace and n.nspname = in_schema AND c.relname = in_table;
|
||||
END IF;
|
||||
IF bPartition THEN
|
||||
continue;
|
||||
END IF;
|
||||
ELSIF v_constraintrec.type_rank = 3 THEN
|
||||
-- handle foreign key constraints
|
||||
--Issue#22 fix: added FKEY_NONE check
|
||||
IF fktype = 'FKEYS_NONE' THEN
|
||||
-- skip
|
||||
continue;
|
||||
ELSIF fkcnt = 0 OR fktype = 'FKEYS_INTERNAL' THEN
|
||||
-- internal def
|
||||
v_table_ddl := v_table_ddl || ' ' -- note: two char spacer to start, to indent the column
|
||||
|| 'CONSTRAINT' || ' '
|
||||
|| v_constraint_name || ' '
|
||||
|| v_constraint_def
|
||||
|| ',' || E'\\n';
|
||||
ELSE
|
||||
-- external def
|
||||
SELECT 'ALTER TABLE ONLY ' || n.nspname || '.' || c2.relname || ' ADD CONSTRAINT ' || r.conname || ' ' || pg_catalog.pg_get_constraintdef(r.oid, true) || ';' INTO v_fkey_def
|
||||
FROM pg_constraint r, pg_class c1, pg_namespace n, pg_class c2 where r.conrelid = c1.oid and r.contype = 'f' and n.nspname = in_schema and n.oid = r.connamespace and r.conrelid = c2.oid and c2.relname = in_table and
|
||||
r.conname = v_constraint_name and r.conparentid = 0;
|
||||
v_fkey_defs = v_fkey_defs || v_fkey_def || E'\\n';
|
||||
END IF;
|
||||
ELSE
|
||||
-- handle all other constraints besides PKEY and FKEYS as internal defs by default
|
||||
v_table_ddl := v_table_ddl || ' ' -- note: two char spacer to start, to indent the column
|
||||
|| 'CONSTRAINT' || ' '
|
||||
|| v_constraint_name || ' '
|
||||
|| v_constraint_def
|
||||
|| ',' || E'\\n';
|
||||
END IF;
|
||||
if bVerbose THEN RAISE INFO 'DEBUG4: constraint name=% constraint_def=%', v_constraint_name,v_constraint_def; END IF;
|
||||
constraintarr := constraintarr || v_constraintrec.constraint_name:: text;
|
||||
|
||||
END LOOP;
|
||||
END IF;
|
||||
IF bVerbose THEN RAISE INFO '(3)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- drop the last comma before ending the create statement
|
||||
v_table_ddl = substr(v_table_ddl, 0, length(v_table_ddl) - 1) || E'\\n';
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- at this point we have everything up to the last table-enclosing parenthesis
|
||||
-- ---------------------------------------------------------------------------
|
||||
IF bVerbose THEN RAISE INFO '(4)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- See if this is an inheritance-based child table and finish up the table create.
|
||||
IF bPartition and bInheritance THEN
|
||||
-- Issue#11: handle parent schema
|
||||
-- v_table_ddl := v_table_ddl || ') INHERITS (' || in_schema || '.' || v_parent || ') ' || E'\\n' || v_relopts || ' ' || v_tablespace || ';' || E'\\n';
|
||||
IF v_parent_schema = '' OR v_parent_schema IS NULL THEN v_parent_schema = in_schema; END IF;
|
||||
v_table_ddl := v_table_ddl || ') INHERITS (' || v_parent_schema || '.' || v_parent || ') ' || E'\\n' || v_relopts || ' ' || v_tablespace || ';' || E'\\n';
|
||||
END IF;
|
||||
|
||||
IF v_pgversion >= 100000 AND NOT bPartition and NOT bInheritance THEN
|
||||
-- See if this is a partitioned table (pg_class.relkind = 'p') and add the partitioned key
|
||||
SELECT pg_get_partkeydef(c1.oid) as partition_key INTO v_partition_key FROM pg_class c1 JOIN pg_namespace n ON (n.oid = c1.relnamespace) LEFT JOIN pg_partitioned_table p ON (c1.oid = p.partrelid)
|
||||
WHERE n.nspname = in_schema and n.oid = c1.relnamespace and c1.relname = in_table and c1.relkind = 'p';
|
||||
|
||||
IF v_partition_key IS NOT NULL AND v_partition_key <> '' THEN
|
||||
-- add partition clause
|
||||
-- NOTE: cannot specify default tablespace for partitioned relations
|
||||
-- v_table_ddl := v_table_ddl || ') PARTITION BY ' || v_partition_key || ' ' || v_tablespace || ';' || E'\\n';
|
||||
v_table_ddl := v_table_ddl || ') PARTITION BY ' || v_partition_key || ';' || E'\\n';
|
||||
ELSEIF v_relopts <> '' THEN
|
||||
v_table_ddl := v_table_ddl || ') ' || v_relopts || ' ' || v_tablespace || ';' || E'\\n';
|
||||
ELSE
|
||||
-- end the create definition
|
||||
v_table_ddl := v_table_ddl || ') ' || v_tablespace || ';' || E'\\n';
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF bVerbose THEN RAISE INFO '(5)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- Add closing paren for regular tables
|
||||
-- IF NOT bPartition THEN
|
||||
-- v_table_ddl := v_table_ddl || ') ' || v_relopts || ' ' || v_tablespace || E';\\n';
|
||||
-- END IF;
|
||||
-- RAISE NOTICE 'ddlsofar3: %', v_table_ddl;
|
||||
|
||||
-- Issue#16 create the external PKEY def if indicated
|
||||
IF v_pkey_def <> '' THEN
|
||||
v_table_ddl := v_table_ddl || v_pkey_def || E'\\n';
|
||||
END IF;
|
||||
|
||||
-- Issue#20
|
||||
IF v_fkey_defs <> '' THEN
|
||||
v_table_ddl := v_table_ddl || v_fkey_defs || E'\\n';
|
||||
END IF;
|
||||
|
||||
IF bVerbose THEN RAISE INFO '(6)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- create indexes
|
||||
FOR v_indexrec IN
|
||||
SELECT indexdef, COALESCE(tablespace, 'pg_default') as tablespace, indexname FROM pg_indexes WHERE (schemaname, tablename) = (in_schema, in_table)
|
||||
LOOP
|
||||
-- RAISE INFO 'DEBUG6: indexname=%', v_indexrec.indexname;
|
||||
-- loop through constraints and skip ones already defined
|
||||
bSkip = False;
|
||||
FOREACH constraintelement IN ARRAY constraintarr
|
||||
LOOP
|
||||
IF constraintelement = v_indexrec.indexname THEN
|
||||
-- RAISE INFO 'DEBUG7: skipping index, %', v_indexrec.indexname;
|
||||
bSkip = True;
|
||||
EXIT;
|
||||
END IF;
|
||||
END LOOP;
|
||||
if bSkip THEN CONTINUE; END IF;
|
||||
|
||||
-- Add IF NOT EXISTS clause so partition index additions will not be created if declarative partition in effect and index already created on parent
|
||||
v_indexrec.indexdef := REPLACE(v_indexrec.indexdef, 'CREATE UNIQUE INDEX', 'CREATE UNIQUE INDEX IF NOT EXISTS');
|
||||
v_indexrec.indexdef := REPLACE(v_indexrec.indexdef, 'CREATE INDEX', 'CREATE INDEX IF NOT EXISTS');
|
||||
-- RAISE INFO 'DEBUG8: adding index, %', v_indexrec.indexname;
|
||||
|
||||
-- NOTE: cannot specify default tablespace for partitioned relations
|
||||
IF v_partition_key IS NOT NULL AND v_partition_key <> '' THEN
|
||||
v_table_ddl := v_table_ddl || v_indexrec.indexdef || ';' || E'\\n';
|
||||
ELSE
|
||||
v_table_ddl := v_table_ddl || v_indexrec.indexdef || ' TABLESPACE ' || v_indexrec.tablespace || ';' || E'\\n';
|
||||
END IF;
|
||||
|
||||
END LOOP;
|
||||
IF bVerbose THEN RAISE INFO '(7)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- Issue#20: added logic for table and column comments
|
||||
IF cmtcnt > 0 THEN
|
||||
FOR v_rec IN
|
||||
SELECT c.relname, 'COMMENT ON ' || CASE WHEN c.relkind in ('r','p') AND a.attname IS NULL THEN 'TABLE ' WHEN c.relkind in ('r','p') AND a.attname IS NOT NULL THEN 'COLUMN ' WHEN c.relkind = 'f' THEN 'FOREIGN TABLE '
|
||||
WHEN c.relkind = 'm' THEN 'MATERIALIZED VIEW ' WHEN c.relkind = 'v' THEN 'VIEW ' WHEN c.relkind = 'i' THEN 'INDEX ' WHEN c.relkind = 'S' THEN 'SEQUENCE ' ELSE 'XX' END || n.nspname || '.' ||
|
||||
CASE WHEN c.relkind in ('r','p') AND a.attname IS NOT NULL THEN quote_ident(c.relname) || '.' || a.attname ELSE quote_ident(c.relname) END || ' IS ' || quote_literal(d.description) || ';' as ddl
|
||||
FROM pg_class c JOIN pg_namespace n ON (n.oid = c.relnamespace) LEFT JOIN pg_description d ON (c.oid = d.objoid) LEFT JOIN pg_attribute a ON (c.oid = a.attrelid AND a.attnum > 0 and a.attnum = d.objsubid)
|
||||
WHERE d.description IS NOT NULL AND n.nspname = in_schema AND c.relname = in_table ORDER BY 2 desc, ddl
|
||||
LOOP
|
||||
--RAISE INFO 'comments:%', v_rec.ddl;
|
||||
v_table_ddl = v_table_ddl || v_rec.ddl || E'\\n';
|
||||
END LOOP;
|
||||
END IF;
|
||||
IF bVerbose THEN RAISE INFO '(8)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
IF trigtype = 'INCLUDE_TRIGGERS' THEN
|
||||
-- Issue#14: handle multiple triggers for a table
|
||||
FOR v_trigrec IN
|
||||
select pg_get_triggerdef(t.oid, True) || ';' as triggerdef FROM pg_trigger t, pg_class c, pg_namespace n
|
||||
WHERE n.nspname = in_schema and n.oid = c.relnamespace and c.relname = in_table and c.relkind = 'r' and t.tgrelid = c.oid and NOT t.tgisinternal
|
||||
LOOP
|
||||
v_table_ddl := v_table_ddl || v_trigrec.triggerdef;
|
||||
v_table_ddl := v_table_ddl || E'\\n';
|
||||
IF bVerbose THEN RAISE INFO 'triggerdef = %', v_trigrec.triggerdef; END IF;
|
||||
END LOOP;
|
||||
END IF;
|
||||
|
||||
IF bVerbose THEN RAISE INFO '(9)tabledef so far: %', v_table_ddl; END IF;
|
||||
-- add empty line
|
||||
v_table_ddl := v_table_ddl || E'\\n';
|
||||
IF bVerbose THEN RAISE INFO '(10)tabledef so far: %', v_table_ddl; END IF;
|
||||
|
||||
-- reset search_path back to what it was
|
||||
IF v_src_path_old = '' THEN
|
||||
SELECT set_config('search_path', '', false) into v_temp;
|
||||
ELSE
|
||||
EXECUTE 'SET search_path = ' || v_src_path_old;
|
||||
END IF;
|
||||
|
||||
RETURN v_table_ddl;
|
||||
|
||||
EXCEPTION
|
||||
WHEN others THEN
|
||||
BEGIN
|
||||
GET STACKED DIAGNOSTICS v_diag1 = MESSAGE_TEXT, v_diag2 = PG_EXCEPTION_DETAIL, v_diag3 = PG_EXCEPTION_HINT, v_diag4 = RETURNED_SQLSTATE, v_diag5 = PG_CONTEXT, v_diag6 = PG_EXCEPTION_CONTEXT;
|
||||
-- v_ret := 'line=' || v_diag6 || '. '|| v_diag4 || '. ' || v_diag1 || ' .' || v_diag2 || ' .' || v_diag3;
|
||||
v_ret := 'line=' || v_diag6 || '. '|| v_diag4 || '. ' || v_diag1;
|
||||
RAISE EXCEPTION '%', v_ret;
|
||||
-- put additional coding here if necessary
|
||||
RETURN '';
|
||||
END;
|
||||
|
||||
END;
|
||||
$$;""".indent(1);
|
||||
|
||||
public static final String ENUM_TYPE_DDL_SQL = """
|
||||
SELECT 'CREATE TYPE "' || n.nspname || '"."' || t.typname || '" AS ENUM (' ||
|
||||
string_agg(quote_literal(e.enumlabel), ', ') || ');' AS ddl
|
||||
FROM pg_type t
|
||||
JOIN pg_enum e ON t.oid = e.enumtypid
|
||||
JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
|
||||
WHERE t.typtype = 'e'
|
||||
GROUP BY n.nspname, t.typname;""";
|
||||
}
|
||||
|
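The VARIADIC tail of pg_get_tabledef accepts the public.tabledefs options declared at the top of FUNCTION_SQL (PKEY_INTERNAL/PKEY_EXTERNAL, FKEYS_INTERNAL/FKEYS_EXTERNAL/FKEYS_NONE, COMMENTS, INCLUDE_TRIGGERS/NO_TRIGGERS); a hedged sketch of a non-default invocation from JDBC, with illustrative schema and table names.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class TabledefOptionsSketch {
    // Emits the primary key and foreign keys as separate ALTER TABLE statements and
    // appends user triggers; assumes FUNCTION_SQL has already been executed on this connection.
    public static String fullDdl(Connection connection, String schema, String table) throws Exception {
        String sql = String.format(
                "select public.pg_get_tabledef('%s','%s',false,'PKEY_EXTERNAL','FKEYS_EXTERNAL','COMMENTS','INCLUDE_TRIGGERS');",
                schema, table);
        try (Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery(sql)) {
            return resultSet.next() ? resultSet.getString(1) : null;
        }
    }
}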
@ -1,34 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>ai.chat2db</groupId>
|
||||
<artifactId>chat2db-plugins</artifactId>
|
||||
<version>${revision}</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>ai.chat2db</groupId>
|
||||
<artifactId>chat2db-spi</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<artifactId>chat2db-redis</artifactId>
|
||||
<build>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/main/java</directory>
|
||||
<includes>
|
||||
<!--The properties configuration file will be placed together with the compiled class file-->
|
||||
<include>**/*.json</include>
|
||||
</includes>
|
||||
</resource>
|
||||
<resource>
|
||||
<directory>src/main/resources</directory>
|
||||
</resource>
|
||||
</resources>
|
||||
</build>
|
||||
</project>
|
@ -1,8 +0,0 @@
|
||||
package ai.chat2db.plugin.redis;
|
||||
|
||||
import ai.chat2db.spi.DBManage;
|
||||
import ai.chat2db.spi.jdbc.DefaultDBManage;
|
||||
|
||||
public class RedisDBManage extends DefaultDBManage implements DBManage {
|
||||
|
||||
}
|
@ -1,57 +0,0 @@
|
||||
package ai.chat2db.plugin.redis;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import ai.chat2db.spi.MetaData;
|
||||
import ai.chat2db.spi.jdbc.DefaultMetaService;
|
||||
import ai.chat2db.spi.model.Database;
|
||||
import ai.chat2db.spi.model.Table;
|
||||
import ai.chat2db.spi.sql.SQLExecutor;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
public class RedisMetaData extends DefaultMetaService implements MetaData {
|
||||
|
||||
@Override
|
||||
public List<Database> databases(Connection connection) {
|
||||
List<Database> databases = new ArrayList<>();
|
||||
return SQLExecutor.getInstance().execute(connection,"config get databases", resultSet -> {
|
||||
try {
|
||||
if (resultSet.next()) {
|
||||
Object count = resultSet.getObject(2);
|
||||
if(StringUtils.isNotBlank(count.toString())) {
|
||||
for (int i = 0; i < Integer.parseInt(count.toString()); i++) {
|
||||
Database database = Database.builder().name(String.valueOf(i)).build();
|
||||
databases.add(database);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return databases;
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Table> tables(Connection connection, String databaseName, String schemaName, String tableName) {
|
||||
return SQLExecutor.getInstance().execute(connection,"scan 0 MATCH * COUNT 1000", resultSet -> {
|
||||
List<Table> tables = new ArrayList<>();
|
||||
try {
|
||||
while (resultSet.next()) {
|
||||
ArrayList list = (ArrayList)resultSet.getObject(2);
|
||||
for (Object object : list) {
|
||||
Table table = new Table();
|
||||
table.setName(object.toString());
|
||||
tables.add(table);
|
||||
}
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return tables;
|
||||
});
|
||||
}
|
||||
}
|
@ -1,27 +0,0 @@
|
||||
package ai.chat2db.plugin.redis;
|
||||
|
||||
|
||||
import ai.chat2db.spi.DBManage;
|
||||
import ai.chat2db.spi.MetaData;
|
||||
import ai.chat2db.spi.Plugin;
|
||||
import ai.chat2db.spi.config.DBConfig;
|
||||
import ai.chat2db.spi.util.FileUtils;
|
||||
|
||||
public class RedisPlugin implements Plugin {
|
||||
|
||||
@Override
|
||||
public DBConfig getDBConfig() {
|
||||
return FileUtils.readJsonValue(this.getClass(),"redis.json", DBConfig.class);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaData getMetaData() {
|
||||
return new RedisMetaData();
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBManage getDBManage() {
|
||||
return new RedisDBManage();
|
||||
}
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
{
|
||||
"dbType": "REDIS",
|
||||
"supportDatabase": false,
|
||||
"supportSchema": false,
|
||||
"driverConfigList": [
|
||||
{
|
||||
"url": "jdbc:redis://127.0.0.1:6379/0",
|
||||
"custom": false,
|
||||
"defaultDriver": true,
|
||||
"downloadJdbcDriverUrls": [
|
||||
"https://oss.sqlgpt.cn/lib/redis-jdbc-driver-1.3.jar"
|
||||
],
|
||||
"jdbcDriver": "redis-jdbc-driver-1.3.jar",
|
||||
"jdbcDriverClass": "jdbc.RedisDriver"
|
||||
}
|
||||
],
|
||||
"name": "Redis"
|
||||
}
|
@ -1 +0,0 @@
|
||||
ai.chat2db.plugin.redis.RedisPlugin
|
@ -3,5 +3,108 @@ package ai.chat2db.plugin.sqlite;
|
||||
import ai.chat2db.spi.DBManage;
|
||||
import ai.chat2db.spi.jdbc.DefaultDBManage;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.ResultSetMetaData;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class SqliteDBManage extends DefaultDBManage implements DBManage {
|
||||
|
||||
|
||||
@Override
|
||||
public String exportDatabase(Connection connection, String databaseName, String schemaName, boolean containData) throws SQLException {
|
||||
StringBuilder sqlBuilder = new StringBuilder();
|
||||
exportTables(connection, databaseName, sqlBuilder, containData);
|
||||
exportViews(connection, databaseName, sqlBuilder);
|
||||
exportTriggers(connection, sqlBuilder);
|
||||
return sqlBuilder.toString();
|
||||
}
|
||||
|
||||
private void exportTables(Connection connection, String databaseName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getTables(databaseName, null, null, new String[]{"TABLE", "SYSTEM TABLE"})) {
|
||||
while (resultSet.next()) {
|
||||
exportTable(connection, resultSet.getString("TABLE_NAME"), sqlBuilder, containData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void exportTable(Connection connection, String tableName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
|
||||
String sql = String.format("SELECT sql FROM sqlite_master WHERE type='table' AND name='%s'", tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append("DROP TABLE IF EXISTS ").append(format(tableName)).append(";").append("\n")
|
||||
.append(resultSet.getString("sql")).append(";").append("\n");
|
||||
if (containData) {
|
||||
exportTableData(connection, tableName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String format(String tableName) {
|
||||
return "\""+tableName+"\"";
|
||||
}
|
||||
|
||||
|
||||
private void exportTableData(Connection connection, String tableName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format("select * from %s", tableName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
||||
while (resultSet.next()) {
|
||||
sqlBuilder.append("INSERT INTO ").append(tableName).append(" VALUES (");
|
||||
for (int i = 1; i <= metaData.getColumnCount(); i++) {
|
||||
String value = resultSet.getString(i);
|
||||
if (Objects.isNull(value)) {
|
||||
sqlBuilder.append("NULL");
|
||||
} else {
|
||||
sqlBuilder.append("'").append(value).append("'");
|
||||
}
|
||||
if (i < metaData.getColumnCount()) {
|
||||
sqlBuilder.append(", ");
|
||||
}
|
||||
}
|
||||
sqlBuilder.append(");\n");
|
||||
}
|
||||
sqlBuilder.append("\n");
|
||||
}
|
||||
}
|
||||
|
||||
private void exportViews(Connection connection, String databaseName, StringBuilder sqlBuilder) throws SQLException {
|
||||
try (ResultSet resultSet = connection.getMetaData().getTables(databaseName, null, null, new String[]{"VIEW"})) {
|
||||
while (resultSet.next()) {
|
||||
exportView(connection, resultSet.getString("TABLE_NAME"), sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportView(Connection connection, String viewName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format("SELECT * FROM sqlite_master WHERE type = 'view' and name='%s';", viewName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append("DROP VIEW IF EXISTS ").append(format(viewName)).append(";").append("\n")
|
||||
.append(resultSet.getString("sql")).append(";").append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTriggers(Connection connection, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = "SELECT * FROM sqlite_master WHERE type = 'trigger';";
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
while (resultSet.next()) {
|
||||
String triggerName = resultSet.getString("name");
|
||||
exportTrigger(connection, triggerName, sqlBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void exportTrigger(Connection connection, String triggerName, StringBuilder sqlBuilder) throws SQLException {
|
||||
String sql = String.format("SELECT * FROM sqlite_master WHERE type = 'trigger' and name='%s';", triggerName);
|
||||
try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
|
||||
if (resultSet.next()) {
|
||||
sqlBuilder.append(resultSet.getString("sql")).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
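A minimal usage sketch for the new SQLite export above, illustrative only and not part of the change set itself: it assumes a plain JDBC connection obtained via DriverManager, a hypothetical database file and output path, and uses "main" as the database name because that is what SqliteMetaData.databases() reports below.

    // Sketch (assumed file paths; imports needed: java.sql.DriverManager, java.nio.file.*)
    try (Connection connection = DriverManager.getConnection("jdbc:sqlite:/tmp/demo.db")) {
        // exportDatabase is the method introduced in the hunk above; true = include row data
        String dump = new SqliteDBManage().exportDatabase(connection, "main", null, true);
        Files.writeString(Path.of("/tmp/demo-export.sql"), dump);
    }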
@ -1,11 +1,5 @@
package ai.chat2db.plugin.sqlite;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import ai.chat2db.plugin.sqlite.builder.SqliteBuilder;
import ai.chat2db.plugin.sqlite.type.SqliteCollationEnum;
import ai.chat2db.plugin.sqlite.type.SqliteColumnTypeEnum;
@ -14,18 +8,69 @@ import ai.chat2db.plugin.sqlite.type.SqliteIndexTypeEnum;
import ai.chat2db.spi.MetaData;
import ai.chat2db.spi.SqlBuilder;
import ai.chat2db.spi.jdbc.DefaultMetaService;
import ai.chat2db.spi.model.Database;
import ai.chat2db.spi.model.Schema;
import ai.chat2db.spi.model.TableMeta;
import ai.chat2db.spi.model.*;
import ai.chat2db.spi.sql.SQLExecutor;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class SqliteMetaData extends DefaultMetaService implements MetaData {
    private static String VIEW_DDL_SQL="SELECT * FROM sqlite_master WHERE type = 'view' and name='%s';";
    @Override
    public Table view(Connection connection, String databaseName, String schemaName, String viewName) {
        Table view = new Table();
        String sql = String.format(VIEW_DDL_SQL,viewName);
        SQLExecutor.getInstance().execute(connection, sql, resultSet->{
            if (resultSet.next()) {
                view.setDatabaseName(databaseName);
                view.setDdl(resultSet.getString("sql"));
            }
        });
        return view;
    }

    private static final String TRIGGER_LIST_SQL = "SELECT * FROM sqlite_master WHERE type = 'trigger';";
    private static String TRIGGER_DDL_SQL = "SELECT * FROM sqlite_master WHERE type = 'trigger' and name='%s';";

    @Override
    public List<Trigger> triggers(Connection connection, String databaseName, String schemaName) {
        List<Trigger> triggers = new ArrayList<>();
        return SQLExecutor.getInstance().execute(connection, TRIGGER_LIST_SQL, resultSet -> {
            while (resultSet.next()) {
                Trigger trigger = new Trigger();
                String triggerName = resultSet.getString("name");
                trigger.setTriggerName(triggerName);
                trigger.setDatabaseName(databaseName);
                triggers.add(trigger);
            }
            return triggers;
        });
    }

    @Override
    public Trigger trigger(Connection connection, String databaseName, String schemaName, String triggerName) {
        Trigger trigger = new Trigger();
        String sql = String.format(TRIGGER_DDL_SQL, triggerName);
        return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
            while (resultSet.next()) {
                trigger.setTriggerName(triggerName);
                trigger.setDatabaseName(databaseName);
                trigger.setTriggerBody(resultSet.getString("sql"));
            }
            return trigger;
        });
    }

    @Override
    public String tableDDL(Connection connection, String databaseName, String schemaName, String tableName) {
        String sql = "SELECT sql FROM sqlite_master WHERE type='table' AND name='" + tableName + "'";
        return SQLExecutor.getInstance().execute(connection,sql, resultSet -> {
        return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
            try {
                if (resultSet.next()) {
                    return resultSet.getString("sql");
@ -36,13 +81,14 @@ public class SqliteMetaData extends DefaultMetaService implements MetaData {
            return null;
        });
    }

    @Override
    public List<Database> databases(Connection connection) {
        return Lists.newArrayList(Database.builder().name("main").build());
    }

    @Override
    public List<Schema> schemas(Connection connection,String databaseName) {
    public List<Schema> schemas(Connection connection, String databaseName) {
        return Lists.newArrayList();
    }

@ -50,6 +96,7 @@ public class SqliteMetaData extends DefaultMetaService implements MetaData {
    public SqlBuilder getSqlBuilder() {
        return new SqliteBuilder();
    }

    @Override
    public TableMeta getTableMeta(String databaseName, String schemaName, String tableName) {
        return TableMeta.builder()

@ -10,7 +10,7 @@ import ai.chat2db.spi.model.TableIndex;
import org.apache.commons.lang3.StringUtils;

public class SqliteBuilder extends DefaultSqlBuilder implements SqlBuilder {
public class SqliteBuilder extends DefaultSqlBuilder {
    @Override
    public String buildCreateTableSql(Table table) {
        StringBuilder script = new StringBuilder();
@ -1,13 +1,180 @@
package ai.chat2db.plugin.sqlserver;

import java.sql.Connection;
import java.sql.SQLException;

import ai.chat2db.spi.DBManage;
import ai.chat2db.spi.jdbc.DefaultDBManage;
import ai.chat2db.spi.sql.SQLExecutor;

import java.sql.*;
import java.util.Objects;

public class SqlServerDBManage extends DefaultDBManage implements DBManage {
    private String tableDDLFunction
        = "CREATE FUNCTION tableSchema.ufn_GetCreateTableScript( @schema_name NVARCHAR(128), @table_name NVARCHAR"
        + "(128)) RETURNS NVARCHAR(MAX) AS BEGIN DECLARE @CreateTableScript NVARCHAR(MAX); DECLARE @IndexScripts "
        + "NVARCHAR(MAX) = ''; DECLARE @ColumnDescriptions NVARCHAR(MAX) = N''; SELECT @CreateTableScript = CONCAT( "
        + "'CREATE TABLE [', s.name, '].[' , t.name, '] (', STUFF( ( SELECT ', [' + c.name + '] ' + tp.name + CASE "
        + "WHEN tp.name IN ('varchar', 'nvarchar', 'char', 'nchar') THEN '(' + IIF(c.max_length = -1, 'MAX', CAST(c"
        + ".max_length AS NVARCHAR(10))) + ')' WHEN tp.name IN ('decimal', 'numeric') THEN '(' + CAST(c.precision AS "
        + "NVARCHAR(10)) + ', ' + CAST(c.scale AS NVARCHAR(10)) + ')' ELSE '' END + ' ' + CASE WHEN c.is_nullable = 1"
        + " THEN 'NULL' ELSE 'NOT NULL' END FROM sys.columns c JOIN sys.types tp ON c.user_type_id = tp.user_type_id "
        + "WHERE c.object_id = t.object_id FOR XML PATH(''), TYPE ).value('/', 'nvarchar(max)'), 1, 1, ''), ');' ) "
        + "FROM sys.tables t JOIN sys.schemas s ON t.schema_id = s.schema_id WHERE t.name = @table_name AND s.name = "
        + "@schema_name; SELECT @IndexScripts = @IndexScripts + 'CREATE ' + CASE WHEN i.is_unique = 1 THEN 'UNIQUE ' "
        + "ELSE '' END + i.type_desc + ' INDEX [' + i.name + '] ON [' + s.name + '].[' + t.name + '] (' + STUFF( ( "
        + "SELECT ', [' + c.name + ']' + CASE WHEN ic.is_descending_key = 1 THEN ' DESC' ELSE ' ASC' END FROM sys"
        + ".index_columns ic JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id WHERE ic"
        + ".object_id = i.object_id AND ic.index_id = i.index_id ORDER BY ic.key_ordinal FOR XML PATH('') ), 1, 1, "
        + "'') + ')' + CASE WHEN i.has_filter = 1 THEN ' WHERE ' + i.filter_definition ELSE '' END + ';' + CHAR(13) +"
        + " CHAR(10) FROM sys.indexes i JOIN sys.tables t ON i.object_id = t.object_id JOIN sys.schemas s ON t"
        + ".schema_id = s.schema_id WHERE i.type > 0 AND t.name = @table_name AND s.name "
        + "= @schema_name; SELECT @ColumnDescriptions += 'EXEC sp_addextendedproperty @name=N''MS_Description'', "
        + "@value=N''' + CAST(p.value AS NVARCHAR(MAX)) + ''', @level0type=N''SCHEMA'', @level0name=N''' + "
        + "@schema_name + ''', @level1type=N''TABLE'', @level1name=N''' + @table_name + ''', @level2type=N''COLUMN'',"
        + " @level2name=N''' + c.name + ''';' + CHAR(13) + CHAR(10) FROM sys.extended_properties p JOIN sys.columns c"
        + " ON p.major_id = c.object_id AND p.minor_id = c.column_id JOIN sys.tables t ON c.object_id = t.object_id "
        + "JOIN sys.schemas s ON t.schema_id = s.schema_id WHERE p.class = 1 AND t.name = @table_name AND s.name = "
        + "@schema_name; SET @CreateTableScript = @CreateTableScript + CHAR(13) + CHAR(10) + @IndexScripts + CHAR(13)"
        + " + CHAR(10)+ @ColumnDescriptions+ CHAR(10); RETURN @CreateTableScript; END";

    private static String TRIGGER_SQL_LIST
        = "SELECT OBJECT_NAME(parent_obj) AS TableName, name AS triggerName, OBJECT_DEFINITION(id) AS "
        + "triggerDefinition, CASE WHEN status & 1 = 1 THEN 'Enabled' ELSE 'Disabled' END AS Status FROM sysobjects "
        + "WHERE xtype = 'TR' ";

    @Override
    public String exportDatabase(Connection connection, String databaseName, String schemaName, boolean containData) throws SQLException {
        StringBuilder sqlBuilder = new StringBuilder();
        exportTables(connection, sqlBuilder, schemaName, containData);
        exportViews(connection, databaseName, schemaName, sqlBuilder);
        exportFunctions(connection, schemaName, sqlBuilder);
        exportProcedures(connection, schemaName, sqlBuilder);
        exportTriggers(connection, sqlBuilder);
        return sqlBuilder.toString();
    }

    private void exportTables(Connection connection, StringBuilder sqlBuilder, String schemaName, boolean containData) throws SQLException {
        String sql ="SELECT name FROM SysObjects Where XType='U'";
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            while (resultSet.next()) {
                String tableName = resultSet.getString("name");
                exportTable(connection, tableName, schemaName, sqlBuilder, containData);
            }
        }
    }

    private void exportTable(Connection connection, String tableName, String schemaName, StringBuilder sqlBuilder, boolean containData) throws SQLException {
        try {
            SQLExecutor.getInstance().execute(connection, tableDDLFunction.replace("tableSchema", schemaName),
                    resultSet -> null);
        } catch (Exception e) {
            //log.error("Failed to create function", e);
        }
        String sql = String.format("SELECT %s.ufn_GetCreateTableScript('%s', '%s') as ddl",schemaName,schemaName,tableName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            if (resultSet.next()) {
                sqlBuilder.append("DROP TABLE IF EXISTS ").append(tableName).append(";").append("\n")
                        .append(resultSet.getString("ddl")).append("\n");
                if (containData) {
                    exportTableData(connection, tableName, sqlBuilder);
                } else {
                    sqlBuilder.append("go").append("\n");
                }
            }
        }
    }

    private void exportTableData(Connection connection, String tableName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("select * from %s", tableName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            ResultSetMetaData metaData = resultSet.getMetaData();
            while (resultSet.next()) {
                sqlBuilder.append("INSERT INTO ").append(tableName).append(" VALUES (");
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    String value = resultSet.getString(i);
                    if (Objects.isNull(value)) {
                        sqlBuilder.append("NULL");
                    } else {
                        sqlBuilder.append("'").append(value).append("'");
                    }
                    if (i < metaData.getColumnCount()) {
                        sqlBuilder.append(", ");
                    }
                }
                sqlBuilder.append(");\n");
            }
            sqlBuilder.append("\n");
        }
        sqlBuilder.append("go").append("\n");
    }

    private void exportViews(Connection connection, String databaseName, String schemaName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("SELECT TABLE_NAME, VIEW_DEFINITION FROM INFORMATION_SCHEMA.VIEWS " +
                "WHERE TABLE_SCHEMA = '%s' AND TABLE_CATALOG = '%s'; ", schemaName, databaseName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            while (resultSet.next()) {
                sqlBuilder.append("DROP VIEW IF EXISTS ").append(resultSet.getString("TABLE_NAME")).append(";\n").append("go").append("\n")
                        .append(resultSet.getString("VIEW_DEFINITION")).append(";").append("\n")
                        .append("go").append("\n");
            }

        }
    }

    private void exportFunctions(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("SELECT name FROM sys.objects WHERE type = 'FN' and SCHEMA_ID = SCHEMA_ID('%s')", schemaName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            while (resultSet.next()) {
                String functionName = resultSet.getString("name");
                exportFunction(connection, functionName, schemaName, sqlBuilder);
            }
        }
    }

    private void exportFunction(Connection connection, String functionName, String schemaName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("SELECT OBJECT_DEFINITION(OBJECT_ID('%s.%s')) as ddl", schemaName, functionName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            if (resultSet.next()) {
                sqlBuilder.append(resultSet.getString("ddl")
                                .replace("CREATE FUNCTION", "CREATE OR ALTER FUNCTION"))
                        .append("\n").append("go").append("\n");

            }
        }
    }

    private void exportProcedures(Connection connection, String schemaName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("SELECT name FROM sys.procedures WHERE SCHEMA_ID = SCHEMA_ID('%s')", schemaName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            while (resultSet.next()) {
                String procedureName = resultSet.getString("name");
                exportProcedure(connection, procedureName, schemaName, sqlBuilder);
            }
        }
    }

    private void exportProcedure(Connection connection, String procedureName, String schemaName, StringBuilder sqlBuilder) throws SQLException {
        String sql = String.format("SELECT definition FROM sys.sql_modules WHERE object_id = (OBJECT_ID('%s.%s'));", schemaName, procedureName);
        try (ResultSet resultSet = connection.createStatement().executeQuery(sql)) {
            if (resultSet.next()) {
                sqlBuilder.append(resultSet.getString("definition")
                                .replace("CREATE PROCEDURE", "CREATE OR ALTER PROCEDURE"))
                        .append("\n").append("go").append("\n");

            }
        }
    }

    private void exportTriggers(Connection connection, StringBuilder sqlBuilder) throws SQLException {
        try (ResultSet resultSet = connection.createStatement().executeQuery(TRIGGER_SQL_LIST)) {
            while (resultSet.next()) {
                sqlBuilder.append(resultSet.getString("triggerDefinition")
                                .replace("CREATE TRIGGER", "CREATE OR ALTER TRIGGER"))
                        .append("\n").append("go").append("\n");
            }
        }
    }
    @Override
    public void connectDatabase(Connection connection, String database) {
        try {
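One point worth illustrating about the SQL Server export above: the generated script separates batches with "go", which is a client-side convention understood by sqlcmd and SSMS rather than T-SQL itself. A sketch (names hypothetical) of how a JDBC-based consumer might split such a script before executing it:

    // Sketch only: split the exported script on standalone "go" lines and run each batch.
    String[] batches = exportedSql.split("(?m)^go\\s*$");
    for (String batch : batches) {
        if (!batch.isBlank()) {
            try (Statement stmt = connection.createStatement()) {
                stmt.execute(batch);
            }
        }
    }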
@ -17,6 +17,7 @@ import ai.chat2db.spi.jdbc.DefaultMetaService;
import ai.chat2db.spi.model.*;
import ai.chat2db.spi.sql.SQLExecutor;
import ai.chat2db.spi.util.SortUtils;
import com.google.common.collect.Lists;
import jakarta.validation.constraints.NotEmpty;
import org.apache.commons.lang3.StringUtils;

@ -398,4 +399,14 @@ public class SqlServerMetaData extends DefaultMetaService implements MetaData {
    public CommandExecutor getCommandExecutor() {
        return new SqlServerCommandExecutor();
    }

    @Override
    public List<String> getSystemDatabases() {
        return systemDatabases;
    }

    @Override
    public List<String> getSystemSchemas() {
        return systemSchemas;
    }
}

@ -8,7 +8,7 @@ import ai.chat2db.spi.model.*;
import ai.chat2db.spi.sql.Chat2DBContext;
import org.apache.commons.lang3.StringUtils;

public class SqlServerSqlBuilder extends DefaultSqlBuilder implements SqlBuilder {
public class SqlServerSqlBuilder extends DefaultSqlBuilder {
    @Override
    public String buildCreateTableSql(Table table) {
        StringBuilder script = new StringBuilder();

@ -28,7 +28,6 @@
        <module>chat2db-mongodb</module>
        <module>chat2db-presto</module>
        <module>chat2db-hive</module>
        <module>chat2db-redis</module>
        <module>chat2db-kingbase</module>
    </modules>
@ -37,6 +37,9 @@ public class DlExecuteParam {
    private String databaseName;


    private String tableName;


    /**
     * schema name
     */

@ -0,0 +1,15 @@
package ai.chat2db.server.domain.api.param;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class DmlSqlCopyParam extends TableQueryParam{

    private String type;
}

@ -27,6 +27,14 @@ public interface DlTemplateService {
    ListResult<ExecuteResult> execute(DlExecuteParam param);


    /**
     *
     * @param param
     * @return
     */
    ListResult<ExecuteResult> executeSelectTable(DlExecuteParam param);


    /**
     * Data source execution update
     *

@ -129,4 +129,12 @@ public interface TableService {
     * @return
     */
    DataResult<Boolean> checkTableVector(TableVectorParam param);


    /**
     * Get dml template sql
     * @param param table query param
     * @return sql
     */
    DataResult<String> copyDmlSql(DmlSqlCopyParam param);
}
@ -111,11 +111,6 @@
        <artifactId>chat2db-presto</artifactId>
        <version>${revision}</version>
    </dependency>
    <dependency>
        <groupId>ai.chat2db</groupId>
        <artifactId>chat2db-redis</artifactId>
        <version>${revision}</version>
    </dependency>
    <dependency>
        <groupId>ai.chat2db</groupId>
        <artifactId>chat2db-sqlite</artifactId>

@ -17,7 +17,6 @@ import ai.chat2db.spi.ValueHandler;
import ai.chat2db.spi.model.*;
import ai.chat2db.spi.sql.Chat2DBContext;
import ai.chat2db.spi.sql.ConnectInfo;
import ai.chat2db.spi.sql.SQLExecutor;
import ai.chat2db.spi.util.JdbcUtils;
import ai.chat2db.spi.util.SqlUtils;
import com.alibaba.druid.DbType;
@ -28,6 +27,7 @@ import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@ -61,12 +61,15 @@ public class DlTemplateServiceImpl implements DlTemplateService {
        CommandExecutor executor = Chat2DBContext.getMetaData().getCommandExecutor();
        Command command = commandConverter.param2model(param);
        List<ExecuteResult> results = executor.execute(command);
        return reBuildHeader(results,param.getSchemaName(),param.getDatabaseName());
    }

    private ListResult<ExecuteResult> reBuildHeader(List<ExecuteResult> results,String schemaName,String databaseName){
        ListResult<ExecuteResult> listResult = ListResult.of(results);
        for (ExecuteResult executeResult : results) {
            List<Header> headers = executeResult.getHeaderList();
            if (executeResult.getSuccess() && executeResult.isCanEdit() && CollectionUtils.isNotEmpty(headers)) {
                headers = setColumnInfo(headers, executeResult.getTableName(), param.getSchemaName(),
                    param.getDatabaseName());
                headers = setColumnInfo(headers, executeResult.getTableName(), schemaName, databaseName);
                executeResult.setHeaderList(headers);
            }
            if (!executeResult.getSuccess()) {
@ -77,12 +80,13 @@ public class DlTemplateServiceImpl implements DlTemplateService {
            addOperationLog(executeResult);
        }
        return listResult;
    }

        // if ("SQLSERVER".equalsIgnoreCase(type)) {
        //     RemoveSpecialGO(param);
        // }


    @Override
    public ListResult<ExecuteResult> executeSelectTable(DlExecuteParam param) {
        Command command = commandConverter.param2model(param);
        List<ExecuteResult> results = Chat2DBContext.getMetaData().getCommandExecutor().executeSelectTable(command);
        return reBuildHeader(results,param.getSchemaName(),param.getDatabaseName());
    }

    @Override
@ -159,8 +163,9 @@ public class DlTemplateServiceImpl implements DlTemplateService {
    @Override
    public DataResult<String> updateSelectResult(UpdateSelectResultParam param) {
        SqlBuilder sqlBuilder = Chat2DBContext.getSqlBuilder();
        String sql = sqlBuilder.generateSqlBasedOnResults(param.getTableName(), param.getHeaderList(),
            param.getOperations());
        QueryResult queryResult = new QueryResult();
        BeanUtils.copyProperties(param, queryResult);
        String sql = sqlBuilder.buildSqlByQuery(queryResult);
        return DataResult.of(sql);
    }

|
||||
private TableConverter tableConverter;
|
||||
|
||||
|
||||
|
||||
private TableCacheVersionMapper getVersionMapper() {
|
||||
return Dbutils.getMapper(TableCacheVersionMapper.class);
|
||||
}
|
||||
@ -251,7 +250,7 @@ public class TableServiceImpl implements TableService {
|
||||
keyIndex.setSchemaName(newTable.getSchemaName());
|
||||
keyIndex.setDatabaseName(newTable.getDatabaseName());
|
||||
keyIndex.setEditStatus(status);
|
||||
if(!EditStatus.ADD.name().equals(status)){
|
||||
if (!EditStatus.ADD.name().equals(status)) {
|
||||
keyIndex.setOldName(keyIndex.getName());
|
||||
}
|
||||
indexes.add(keyIndex);
|
||||
@ -321,7 +320,7 @@ public class TableServiceImpl implements TableService {
|
||||
long total = 0;
|
||||
long version = 0L;
|
||||
if (param.isRefresh() || versionDO == null) {
|
||||
total = addCache(param,versionDO);
|
||||
total = addCache(param, versionDO);
|
||||
} else {
|
||||
if ("2".equals(versionDO.getStatus())) {
|
||||
version = versionDO.getVersion() - 1;
|
||||
@ -350,7 +349,7 @@ public class TableServiceImpl implements TableService {
|
||||
return PageResult.of(tables, total, param);
|
||||
}
|
||||
|
||||
private long addCache(TablePageQueryParam param,TableCacheVersionDO versionDO){
|
||||
private long addCache(TablePageQueryParam param, TableCacheVersionDO versionDO) {
|
||||
LambdaQueryWrapper<TableCacheVersionDO> queryWrapper = new LambdaQueryWrapper<>();
|
||||
String key = getTableKey(param.getDataSourceId(), param.getDatabaseName(), param.getSchemaName());
|
||||
queryWrapper.eq(TableCacheVersionDO::getKey, key);
|
||||
@ -388,7 +387,7 @@ public class TableServiceImpl implements TableService {
|
||||
queryWrapper.eq(TableCacheVersionDO::getKey, key);
|
||||
TableCacheVersionDO versionDO = getVersionMapper().selectOne(queryWrapper);
|
||||
if (versionDO == null) {
|
||||
addCache(param,versionDO);
|
||||
addCache(param, versionDO);
|
||||
versionDO = getVersionMapper().selectOne(queryWrapper);
|
||||
}
|
||||
long version = "2".equals(versionDO.getStatus()) ? versionDO.getVersion() - 1 : versionDO.getVersion();
|
||||
@ -598,4 +597,13 @@ public class TableServiceImpl implements TableService {
|
||||
}
|
||||
return DataResult.of(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataResult<String> copyDmlSql(DmlSqlCopyParam param) {
|
||||
List<TableColumn> columns = queryColumns(param);
|
||||
SqlBuilder sqlBuilder = Chat2DBContext.getSqlBuilder();
|
||||
Table table = Table.builder().name(param.getTableName()).columnList(columns).build();
|
||||
String sql = sqlBuilder.getTableDmlSql(table, param.getType());
|
||||
return DataResult.of(sql);
|
||||
}
|
||||
}
|
||||
|
@ -111,7 +111,7 @@ public class DatabaseController {
    }
    @PostMapping("/export")
    public void exportDatabase(@Valid @RequestBody DatabaseExportRequest request, HttpServletResponse response){
        String fileName = Objects.isNull(request.getDatabaseName()) ? request.getSchemaName() : request.getDatabaseName();
        String fileName = Objects.isNull(request.getSchemaName())?request.getDatabaseName() : request.getSchemaName();
        response.setContentType("text/sql");
        response.setHeader("Content-disposition", "attachment;filename*=utf-8''" + fileName + ".sql");
        response.setCharacterEncoding("utf-8");

@ -97,18 +97,7 @@ public class RdbDmlController {
    @RequestMapping(value = "/execute_table", method = {RequestMethod.POST, RequestMethod.PUT})
    public ListResult<ExecuteResultVO> executeTable(@RequestBody DmlTableRequest request) {
        DlExecuteParam param = rdbWebConverter.request2param(request);
        // parse sql
        String type = Chat2DBContext.getConnectInfo().getDbType();
        MetaData metaData = Chat2DBContext.getMetaData();
        if (DataSourceTypeEnum.MONGODB.getCode().equals(type)) {
            param.setSql("db." + request.getTableName() + ".find()");
        } else if (DataSourceTypeEnum.SQLSERVER.getCode().equals(type)){
            param.setSql("select * from" + metaData.getMetaDataName(request.getSchemaName()) + "." + metaData.getMetaDataName(request.getTableName()));
        }else {
            // Splice `tableName` to avoid the problem of keywords being occupied
            param.setSql("select * from " + metaData.getMetaDataName(request.getTableName()));
        }
        return dlTemplateService.execute(param)
        return dlTemplateService.executeSelectTable(param)
            .map(rdbWebConverter::dto2vo);
    }

@ -112,6 +112,12 @@ public class TableController extends EmbeddingController {
        return ListResult.of(tableVOS);
    }

    @GetMapping("/copy_dml_sql")
    public DataResult<String> copyDmlSql(@Valid DmlSqlCopyRequest request) {
        DmlSqlCopyParam queryParam = rdbWebConverter.dmlRequest2param(request);
        return tableService.copyDmlSql(queryParam);
    }

    /**
     * Query the table index under the current DB
     *

@ -254,4 +254,6 @@ public abstract class RdbWebConverter {
    public abstract EsTableSchemaRequest req2req(TableBriefQueryRequest request);

    public abstract TablePageQueryParam schemaReq2page(EsTableSchemaRequest request);

    public abstract DmlSqlCopyParam dmlRequest2param(DmlSqlCopyRequest request) ;
}

@ -0,0 +1,14 @@
package ai.chat2db.server.web.api.controller.rdb.request;

import ai.chat2db.server.web.api.controller.data.source.request.DataSourceBaseRequest;
import jakarta.validation.constraints.NotNull;
import lombok.Data;

@Data
public class DmlSqlCopyRequest extends DataSourceBaseRequest {

    @NotNull
    private String tableName;

    private String type;
}
@ -86,4 +86,21 @@
            <artifactId>bson</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
                <version>3.2.1</version> <!-- use the latest available version -->
                <executions>
                    <execution>
                        <id>attach-sources</id>
                        <goals>
                            <goal>jar</goal> <!-- package the sources into a jar -->
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
@ -27,6 +27,12 @@ public interface CommandExecutor {
    ExecuteResult executeUpdate(String sql, Connection connection, int n)throws SQLException;


    /**
     * Execute command
     */
    List<ExecuteResult> executeSelectTable(Command command);


    /**
     *
     *

@ -3,7 +3,9 @@ package ai.chat2db.spi;
import java.sql.Connection;
import java.util.List;

import ai.chat2db.server.tools.base.wrapper.result.PageResult;
import ai.chat2db.spi.model.*;
import cn.hutool.db.Page;
import jakarta.validation.constraints.NotEmpty;

/**
@ -52,6 +54,19 @@ public interface MetaData {
     */
    List<Table> tables(Connection connection, @NotEmpty String databaseName, String schemaName, String tableName);


    /**
     * Querying all table under a schema.
     *
     * @param connection
     * @param databaseName
     * @param schemaName
     * @param tableNamePattern
     * @param pageNo
     * @param pageSize
     * @return
     */
    PageResult<Table> tables(Connection connection, String databaseName, String schemaName, String tableNamePattern, int pageNo, int pageSize);
    /**
     * Querying view information.
     *
@ -216,4 +231,17 @@ public interface MetaData {
     * Get command executor.
     */
    CommandExecutor getCommandExecutor();

    /**
     * Get system databases.
     * @return
     */
    List<String> getSystemDatabases();

    /**
     * Get system schemas.
     * @return
     */
    List<String> getSystemSchemas();

}
@ -4,7 +4,7 @@ import ai.chat2db.spi.model.*;

import java.util.List;

public interface SqlBuilder {
public interface SqlBuilder<T> {

    /**
     * Generate create table sql
@ -12,7 +12,7 @@ public interface SqlBuilder {
     * @param table
     * @return
     */
    String buildCreateTableSql(Table table);
    String buildCreateTableSql(T table);


    /**
@ -22,7 +22,7 @@ public interface SqlBuilder {
     * @param oldTable
     * @return
     */
    String buildModifyTaleSql(Table oldTable, Table newTable);
    String buildModifyTaleSql(T oldTable, T newTable);


    /**
@ -79,6 +79,13 @@ public interface SqlBuilder {
    /**
     * generate sql based on results
     */
    String generateSqlBasedOnResults(String tableName, List<Header> headerList, List<ResultOperation> operations);
    String buildSqlByQuery(QueryResult queryResult);

    /**
     * DML SQL
     * @param table
     * @param type
     * @return
     */
    String getTableDmlSql(T table,String type);
}
@ -1,6 +1,7 @@

package ai.chat2db.spi.config;

import java.io.Serializable;
import java.util.List;

import ai.chat2db.spi.model.KeyValue;
@ -12,7 +13,8 @@ import org.apache.commons.lang3.StringUtils;
 * @version : DriverConfig.java
 */
@Data
public class DriverConfig {
public class DriverConfig implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * url

@ -0,0 +1,5 @@
package ai.chat2db.spi.enums;

public enum DmlType {
    INSERT, UPDATE, DELETE, SELECT
}
@ -23,19 +23,18 @@ import org.apache.commons.lang3.StringUtils;
public class DefaultDBManage implements DBManage {



    @Override
    public Connection getConnection(ConnectInfo connectInfo) {
        Connection connection = connectInfo.getConnection();
        if (connection != null) {
            return connection;
        }
        Session session = null;
        SSHInfo ssh = connectInfo.getSsh();
        String url = connectInfo.getUrl();
        String host = connectInfo.getHost();
        String port = connectInfo.getPort() + "";
        Session session = null;
        try {
            if (connection != null && !connection.isClosed()) {
                return connection;
            }
            ssh.setRHost(host);
            ssh.setRPort(port);
            session = getSession(ssh);
@ -47,7 +46,7 @@ public class DefaultDBManage implements DBManage {
        }
        try {
            connection = IDriverManager.getConnection(url, connectInfo.getUser(), connectInfo.getPassword(),
                connectInfo.getDriverConfig(), connectInfo.getExtendMap());
                    connectInfo.getDriverConfig(), connectInfo.getExtendMap());

        } catch (Exception e1) {
            if (connection != null) {
@ -85,37 +84,37 @@ public class DefaultDBManage implements DBManage {
    }

    @Override
    public void connectDatabase(Connection connection,String database) {
    public void connectDatabase(Connection connection, String database) {

    }

    @Override
    public void modifyDatabase(Connection connection,String databaseName, String newDatabaseName) {
    public void modifyDatabase(Connection connection, String databaseName, String newDatabaseName) {

    }

    @Override
    public void createDatabase(Connection connection,String databaseName) {
    public void createDatabase(Connection connection, String databaseName) {

    }

    @Override
    public void dropDatabase(Connection connection,String databaseName) {
    public void dropDatabase(Connection connection, String databaseName) {

    }

    @Override
    public void createSchema(Connection connection,String databaseName, String schemaName) {
    public void createSchema(Connection connection, String databaseName, String schemaName) {

    }

    @Override
    public void dropSchema(Connection connection,String databaseName, String schemaName) {
    public void dropSchema(Connection connection, String databaseName, String schemaName) {

    }

    @Override
    public void modifySchema(Connection connection,String databaseName, String schemaName, String newSchemaName) {
    public void modifySchema(Connection connection, String databaseName, String schemaName, String newSchemaName) {

    }

@ -146,8 +145,8 @@ public class DefaultDBManage implements DBManage {


    @Override
    public void dropTable(Connection connection,String databaseName, String schemaName, String tableName) {
        String sql = "DROP TABLE "+ tableName ;
        SQLExecutor.getInstance().execute(connection,sql, resultSet -> null);
    public void dropTable(Connection connection, String databaseName, String schemaName, String tableName) {
        String sql = "DROP TABLE " + tableName;
        SQLExecutor.getInstance().execute(connection, sql, resultSet -> null);
    }
}
@ -3,15 +3,16 @@ package ai.chat2db.spi.jdbc;
import java.sql.Connection;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import ai.chat2db.server.tools.base.wrapper.result.PageResult;
import ai.chat2db.spi.CommandExecutor;
import ai.chat2db.spi.MetaData;
import ai.chat2db.spi.SqlBuilder;
import ai.chat2db.spi.ValueHandler;
import ai.chat2db.spi.model.*;
import ai.chat2db.spi.sql.SQLExecutor;
import com.google.common.collect.Lists;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

@ -48,6 +49,16 @@ public class DefaultMetaService implements MetaData {
        return SQLExecutor.getInstance().tables(connection, StringUtils.isEmpty(databaseName) ? null : databaseName, StringUtils.isEmpty(schemaName) ? null : schemaName, tableName, new String[]{"TABLE","SYSTEM TABLE"});
    }

    @Override
    public PageResult<Table> tables(Connection connection, String databaseName, String schemaName, String tableNamePattern, int pageNo, int pageSize) {
        List<Table> tables = tables(connection, databaseName, schemaName, tableNamePattern);
        if(CollectionUtils.isEmpty(tables)){
            return PageResult.of(tables,0L,pageNo, pageSize);
        }
        List result = tables.stream().skip((pageNo - 1) * pageSize).limit(pageSize).collect(Collectors.toList());
        return PageResult.of(result, (long) tables.size(), pageNo, pageSize);
    }

    @Override
    public Table view(Connection connection, String databaseName, String schemaName, String viewName) {
        return null;
@ -64,7 +75,11 @@ public class DefaultMetaService implements MetaData {
        if(CollectionUtils.isEmpty(functions)){
            return functions;
        }
        return functions.stream().filter(function -> StringUtils.isNotBlank(function.getFunctionName())).collect(Collectors.toList());
        return functions.stream().filter(function -> StringUtils.isNotBlank(function.getFunctionName())).map(function -> {
            String functionName = function.getFunctionName();
            function.setFunctionName(functionName.trim());
            return function;
        }).collect(Collectors.toList());
    }

    @Override
@ -79,7 +94,11 @@ public class DefaultMetaService implements MetaData {
        if(CollectionUtils.isEmpty(procedures)){
            return procedures;
        }
        return procedures.stream().filter(function -> StringUtils.isNotBlank(function.getProcedureName())).collect(Collectors.toList());
        return procedures.stream().filter(function -> StringUtils.isNotBlank(function.getProcedureName())).map(procedure -> {
            String procedureName = procedure.getProcedureName();
            procedure.setProcedureName(procedureName.trim());
            return procedure;
        }).collect(Collectors.toList());
    }

    @Override
@ -142,4 +161,14 @@ public class DefaultMetaService implements MetaData {
    public CommandExecutor getCommandExecutor() {
        return SQLExecutor.getInstance();
    }

    @Override
    public List<String> getSystemDatabases() {
        return Lists.newArrayList();
    }

    @Override
    public List<String> getSystemSchemas() {
        return Lists.newArrayList();
    }
}
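The new paged tables(...) overload above pages in memory with skip/limit over the full list. A small illustrative sketch of that arithmetic with assumed values (page numbers are 1-based, so page 3 with pageSize 20 skips (3 - 1) * 20 = 40 entries), using plain strings in place of Table objects; imports java.util.stream.* assumed:

    // Sketch: 95 items, page 3 of size 20 -> items 41..60, total stays 95
    List<String> all = IntStream.rangeClosed(1, 95).mapToObj(i -> "t" + i).collect(Collectors.toList());
    int pageNo = 3, pageSize = 20;
    List<String> page = all.stream()
            .skip((long) (pageNo - 1) * pageSize)
            .limit(pageSize)
            .collect(Collectors.toList());
    // page holds t41..t60; the reported total is all.size(), mirroring PageResult.of(result, total, pageNo, pageSize)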
@ -1,8 +1,8 @@
package ai.chat2db.spi.jdbc;

import ai.chat2db.server.tools.base.wrapper.result.DataResult;
import ai.chat2db.spi.MetaData;
import ai.chat2db.spi.SqlBuilder;
import ai.chat2db.spi.enums.DmlType;
import ai.chat2db.spi.model.*;
import ai.chat2db.spi.sql.Chat2DBContext;
import ai.chat2db.spi.util.SqlUtils;
@ -19,7 +19,7 @@ import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.List;

public class DefaultSqlBuilder implements SqlBuilder {
public class DefaultSqlBuilder implements SqlBuilder<Table> {


    @Override
@ -90,8 +90,10 @@ public class DefaultSqlBuilder implements SqlBuilder {
    }

    @Override
    public String generateSqlBasedOnResults(String tableName, List<Header> headerList, List<ResultOperation> operations) {

    public String buildSqlByQuery(QueryResult queryResult) {
        List<Header> headerList = queryResult.getHeaderList();
        List<ResultOperation> operations = queryResult.getOperations();
        String tableName = queryResult.getTableName();
        StringBuilder stringBuilder = new StringBuilder();
        MetaData metaSchema = Chat2DBContext.getMetaData();
        List<String> keyColumns = getPrimaryColumns(headerList);
@ -101,13 +103,13 @@ public class DefaultSqlBuilder implements SqlBuilder {
            List<String> odlRow = operation.getOldDataList();
            String sql = "";
            if ("UPDATE".equalsIgnoreCase(operation.getType())) {
                sql = getUpdateSql(tableName,headerList, row, odlRow, metaSchema, keyColumns, false);
                sql = getUpdateSql(tableName, headerList, row, odlRow, metaSchema, keyColumns, false);
            } else if ("CREATE".equalsIgnoreCase(operation.getType())) {
                sql = getInsertSql(tableName,headerList, row, metaSchema);
                sql = getInsertSql(tableName, headerList, row, metaSchema);
            } else if ("DELETE".equalsIgnoreCase(operation.getType())) {
                sql = getDeleteSql(tableName,headerList, odlRow, metaSchema, keyColumns);
                sql = getDeleteSql(tableName, headerList, odlRow, metaSchema, keyColumns);
            } else if ("UPDATE_COPY".equalsIgnoreCase(operation.getType())) {
                sql = getUpdateSql(tableName,headerList, row, row, metaSchema, keyColumns, true);
                sql = getUpdateSql(tableName, headerList, row, row, metaSchema, keyColumns, true);
            }

            stringBuilder.append(sql + ";\n");
@ -115,6 +117,76 @@ public class DefaultSqlBuilder implements SqlBuilder {
        return stringBuilder.toString();
    }

    @Override
    public String getTableDmlSql(Table table, String type) {
        if (table == null || CollectionUtils.isEmpty(table.getColumnList()) || StringUtils.isBlank(type)) {
            return "";
        }
        if(DmlType.INSERT.name().equalsIgnoreCase(type)) {
            return getInsertSql(table.getName(), table.getColumnList());
        } else if(DmlType.UPDATE.name().equalsIgnoreCase(type)) {
            return getUpdateSql(table.getName(), table.getColumnList());
        } else if(DmlType.DELETE.name().equalsIgnoreCase(type)) {
            return getDeleteSql(table.getName(), table.getColumnList());
        }else if(DmlType.SELECT.name().equalsIgnoreCase(type)) {
            return getSelectSql(table.getName(), table.getColumnList());
        }
        return "";
    }

    private String getSelectSql(String name, List<TableColumn> columnList) {
        StringBuilder script = new StringBuilder();
        script.append("SELECT ");
        for (TableColumn column : columnList) {
            script.append(column.getName())
                    .append(",");
        }
        script.deleteCharAt(script.length() - 1);
script.append(" FROM where").append(name);
|
||||
        return script.toString();
    }

    private String getDeleteSql(String name, List<TableColumn> columnList) {
        StringBuilder script = new StringBuilder();
        script.append("DELETE FROM ").append(name)
                .append(" where ");
        return script.toString();
    }

    private String getUpdateSql(String name, List<TableColumn> columnList) {
        StringBuilder script = new StringBuilder();
        script.append("UPDATE ").append(name)
                .append(" set ");
        for (TableColumn column : columnList) {
            script.append(column.getName())
                    .append(" = ")
                    .append(" ")
                    .append(",");
        }
        script.deleteCharAt(script.length() - 1);
        script.append(" where ");
        return script.toString();
    }

    private String getInsertSql(String name, List<TableColumn> columnList) {
        StringBuilder script = new StringBuilder();
        script.append("INSERT INTO ").append(name)
                .append(" (");
        for (TableColumn column : columnList) {
            script.append(column.getName())
                    .append(",");
        }
        script.deleteCharAt(script.length() - 1);
        script.append(") VALUES (");
        for (TableColumn column : columnList) {
            script.append(" ")
                    .append(",");
        }
        script.deleteCharAt(script.length() - 1);
        script.append(")");
        return script.toString();
    }

    private List<String> getPrimaryColumns(List<Header> headerList) {
        if (CollectionUtils.isEmpty(headerList)) {
            return Lists.newArrayList();
@ -132,7 +204,6 @@ public class DefaultSqlBuilder implements SqlBuilder {
                                List<String> keyColumns) {
        StringBuilder script = new StringBuilder();
        script.append("DELETE FROM ").append(tableName).append("");

        script.append(buildWhere(headerList, row, metaSchema, keyColumns));
        return script.toString();
    }
@ -178,7 +249,7 @@ public class DefaultSqlBuilder implements SqlBuilder {
        return script.toString();
    }

    private String getInsertSql(String tableName, List<Header> headerList, List<String> row, MetaData metaSchema) {
    private String getInsertSql(String tableName, List<Header> headerList, List<String> row, MetaData metaSchema) {
        if (CollectionUtils.isEmpty(row) || ObjectUtils.allNull(row.toArray())) {
            return "";
        }
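A short usage sketch for the new DML templates, illustrative only: it builds a Table the same way TableServiceImpl.copyDmlSql does (Table.builder().name(...).columnList(...)) and asks DefaultSqlBuilder for an INSERT skeleton. Constructing TableColumn through its Lombok builder with a name field is an assumption based on the @SuperBuilder annotations in the model classes that follow.

    // Sketch: two hypothetical columns on a hypothetical "student" table
    Table table = Table.builder()
            .name("student")
            .columnList(List.of(
                    TableColumn.builder().name("id").build(),     // builder usage assumed
                    TableColumn.builder().name("name").build()))
            .build();
    String insertTemplate = new DefaultSqlBuilder().getTableDmlSql(table, "INSERT");
    // With the logic above this yields: INSERT INTO student (id,name) VALUES ( , )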
@ -1,5 +1,6 @@
package ai.chat2db.spi.model;

import java.io.Serializable;
import java.math.BigDecimal;


@ -17,7 +18,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class Cell {
public class Cell implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * cell type

@ -3,9 +3,12 @@ package ai.chat2db.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;

import java.io.Serializable;

@Data
@AllArgsConstructor
public class Charset {
public class Charset implements Serializable {
    private static final long serialVersionUID = 1L;

    private String charsetName;


@ -3,9 +3,12 @@ package ai.chat2db.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;

import java.io.Serializable;

@Data
@AllArgsConstructor
public class Collation {
public class Collation implements Serializable {
    private static final long serialVersionUID = 1L;

    private String collationName;
}

@ -3,9 +3,12 @@ package ai.chat2db.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;

import java.io.Serializable;

@Data
@AllArgsConstructor
public class ColumnType {
public class ColumnType implements Serializable {
    private static final long serialVersionUID = 1L;
    private String typeName;
    private boolean supportLength;
    private boolean supportScale;

@ -3,8 +3,11 @@ package ai.chat2db.spi.model;
import jakarta.validation.constraints.NotNull;
import lombok.Data;

import java.io.Serializable;

@Data
public class Command {
public class Command implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * sql statement
@ -35,6 +38,11 @@ public class Command {
     */
    private String schemaName;

    /**
     *
     */
    private String tableName;

    /**
     *Page coding
     * Only available for select statements

@ -6,6 +6,8 @@ import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import java.io.Serializable;

/**
 * @author jipengfei
 * @version : CreateTableSql.java
@ -14,7 +16,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class CreateTableSql {
public class CreateTableSql implements Serializable {
    private static final long serialVersionUID = 1L;

    public String tableName;


@ -5,6 +5,8 @@ import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import java.io.Serializable;

/**
 * Database connection object
 *
@ -14,7 +16,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class DataSourceConnect {
public class DataSourceConnect implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * success flag

@ -3,9 +3,12 @@ package ai.chat2db.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;

import java.io.Serializable;

@Data
@AllArgsConstructor
public class DefaultValue {
public class DefaultValue implements Serializable {
    private static final long serialVersionUID = 1L;

    private String defaultValue;


@ -1,6 +1,7 @@

package ai.chat2db.spi.model;

import java.io.Serializable;
import java.sql.Driver;

import ai.chat2db.spi.config.DriverConfig;
@ -17,7 +18,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class DriverEntry {
public class DriverEntry implements Serializable {
    private static final long serialVersionUID = 1L;

    private DriverConfig driverConfig;


@ -3,9 +3,12 @@ package ai.chat2db.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;

import java.io.Serializable;

@Data
@AllArgsConstructor
public class EngineType {
public class EngineType implements Serializable {
    private static final long serialVersionUID = 1L;
    private String name;
    private boolean supportTTL;
    private boolean supportSortOrder;

@ -1,6 +1,8 @@
package ai.chat2db.spi.model;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

import lombok.AllArgsConstructor;
import lombok.Data;
@ -16,7 +18,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class ExecuteResult {
public class ExecuteResult implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * success flag
@ -102,7 +105,12 @@ public class ExecuteResult {
    private boolean canEdit;

    /**
     * Table Name
     * Table Name for the result
     */
    private String tableName;

    /**
     * Extra information that can be used by the plugin
     */
    private Map<String,Object> extra;
}
@ -7,6 +7,8 @@ import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import java.io.Serializable;

/**
 * @author jipengfei
 * @version : Function.java
@ -15,7 +17,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class Function {
public class Function implements Serializable {
    private static final long serialVersionUID = 1L;
    //FUNCTION_CAT String => function catalog (may be null)
    //FUNCTION_SCHEM String => function schema (may be null)
    //FUNCTION_NAME String => function name. This is the name used to invoke the function

@ -6,6 +6,8 @@ import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import java.io.Serializable;

/**
 * cell header
 *
@ -15,7 +17,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class Header{
public class Header implements Serializable {
    private static final long serialVersionUID = 1L;
    /**
     * cell type
     *

@ -3,9 +3,12 @@ package ai.chat2db.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;

import java.io.Serializable;

@Data
@AllArgsConstructor
public class IndexType {
public class IndexType implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     *

@ -2,8 +2,11 @@ package ai.chat2db.spi.model;

import lombok.Data;

import java.io.Serializable;

@Data
public class OrderBy {
public class OrderBy implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * sort field

@ -7,6 +7,8 @@ import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import java.io.Serializable;

/**
 * @author jipengfei
 * @version : Procedure.java
@ -15,7 +17,8 @@ import lombok.experimental.SuperBuilder;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class Procedure {
public class Procedure implements Serializable {
    private static final long serialVersionUID = 1L;
    //PROCEDURE_CAT String => procedure catalog (may be null)
    //PROCEDURE_SCHEM String => procedure schema (may be null)
    //PROCEDURE_NAME String => procedure name

@ -0,0 +1,16 @@
package ai.chat2db.spi.model;

import lombok.Data;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

@Data
public class QueryResult implements Serializable {

    private String tableName;
    private List<Header> headerList;
    private List<ResultOperation> operations;
    private Map<String, Object> extra;
}

@ -2,10 +2,12 @@ package ai.chat2db.spi.model;

import lombok.Data;

import java.io.Serializable;
import java.util.List;

@Data
public class ResultOperation {
public class ResultOperation implements Serializable {
    private static final long serialVersionUID = 1L;

    private String type;


@ -1,6 +1,7 @@

package ai.chat2db.spi.model;

import java.io.Serializable;
import java.util.Objects;

import lombok.Data;
@ -10,7 +11,8 @@ import lombok.Data;
 * @version : SSHInfo.java
 */
@Data
public class SSHInfo {
public class SSHInfo implements Serializable {
    private static final long serialVersionUID = 1L;

    /**
     * Whether to use ssh

@ -3,10 +3,13 @@ package ai.chat2db.spi.model;

import lombok.Data;

import java.io.Serializable;

/**
 * @author jipengfei
 * @version : SSLInfo.java
 */
@Data
public class SSLInfo {
public class SSLInfo implements Serializable {
    private static final long serialVersionUID = 1L;
}
@ -6,6 +6,8 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* @author jipengfei
|
||||
* @version : ShowDatabaseResult.java
|
||||
@ -14,6 +16,7 @@ import lombok.experimental.SuperBuilder;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class ShowDatabaseResult {
|
||||
public class ShowDatabaseResult implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
String database;
|
||||
}
|
@ -6,11 +6,14 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
@Data
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class SimpleColumn {
|
||||
public class SimpleColumn implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Column name
|
||||
|
@ -6,11 +6,14 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
@Data
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class SimpleTable {
|
||||
public class SimpleTable implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
/**
|
||||
* Table Name
|
||||
*/
|
||||
|
@ -5,6 +5,8 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* sql object
|
||||
*
|
||||
@ -14,7 +16,8 @@ import lombok.experimental.SuperBuilder;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class Sql {
|
||||
public class Sql implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* sql
|
||||
|
@ -6,6 +6,7 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
@ -17,7 +18,8 @@ import java.util.List;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class Table {
|
||||
public class Table implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Table Name
|
||||
|
@ -6,6 +6,7 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@ -17,7 +18,8 @@ import java.util.Objects;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class TableColumn {
|
||||
public class TableColumn implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Old column, when modifying a column, you need this parameter
|
||||
|
@ -1,5 +1,6 @@
|
||||
package ai.chat2db.spi.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
|
||||
@ -17,7 +18,8 @@ import lombok.experimental.SuperBuilder;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class TableIndex {
|
||||
public class TableIndex implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private String oldName;
|
||||
|
||||
|
@ -7,6 +7,8 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* Column information
|
||||
*
|
||||
@ -16,7 +18,8 @@ import lombok.experimental.SuperBuilder;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class TableIndexColumn {
|
||||
public class TableIndexColumn implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Index name
|
||||
|
@ -3,11 +3,13 @@ package ai.chat2db.spi.model;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
@Builder
|
||||
public class TableMeta {
|
||||
public class TableMeta implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private List<ColumnType> columnTypes;
|
||||
|
||||
|
@ -6,6 +6,8 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* @author jipengfei
|
||||
* @version : Trigger.java
|
||||
@ -14,7 +16,8 @@ import lombok.experimental.SuperBuilder;
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class Trigger {
|
||||
public class Trigger implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private String databaseName;
|
||||
|
||||
|
@ -6,11 +6,14 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
@Data
|
||||
@SuperBuilder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class Type {
|
||||
public class Type implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@JsonAlias("TYPE_NAME")
|
||||
private String typeName;
|
||||
|
@ -1,26 +1,22 @@

package ai.chat2db.spi.sql;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

import ai.chat2db.spi.DBManage;
import ai.chat2db.spi.MetaData;
import ai.chat2db.spi.Plugin;
import ai.chat2db.spi.SqlBuilder;
import ai.chat2db.spi.config.DBConfig;
import ai.chat2db.spi.config.DriverConfig;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.concurrent.ConcurrentHashMap;

/**
* @author jipengfei
* @version : Chat2DBContext.java
@ -29,14 +25,15 @@ import org.apache.commons.lang3.StringUtils;
public class Chat2DBContext {
private static final ThreadLocal<ConnectInfo> CONNECT_INFO_THREAD_LOCAL = new ThreadLocal<>();

private static final Cache<String, ConnectInfo> CONNECT_INFO_CACHE = CacheBuilder.newBuilder()
.maximumSize(100)
.expireAfterAccess(10, TimeUnit.MINUTES)
.removalListener((RemovalListener<String, ConnectInfo>) notification -> {
if(notification.getValue()!=null){
notification.getValue().close();
}
}).build();
// private static final Cache<String, ConnectInfo> CONNECT_INFO_CACHE = CacheBuilder.newBuilder()
// .maximumSize(1000)
// .expireAfterAccess(5, TimeUnit.MINUTES)
// .removalListener((RemovalListener<String, ConnectInfo>) notification -> {
// if (notification.getValue() != null) {
// System.out.println("remove connect info " + notification.getKey());
// notification.getValue().close();
// }
// }).build();

public static Map<String, Plugin> PLUGIN_MAP = new ConcurrentHashMap<>();

@ -92,19 +89,24 @@ public class Chat2DBContext {
public static Connection getConnection() {
ConnectInfo connectInfo = getConnectInfo();
Connection connection = connectInfo.getConnection();
if (connection == null) {
synchronized (connectInfo) {
connection = connectInfo.getConnection();
try {
if (connection != null && !connection.isClosed()) {
return connection;
} else {
try {
if (connection == null || connection.isClosed()) {
synchronized (connectInfo) {
connection = connectInfo.getConnection();
try {
if (connection != null && !connection.isClosed()) {
return connection;
} else {
connection = getDBManage().getConnection(connectInfo);
}
} catch (SQLException e) {
connection = getDBManage().getConnection(connectInfo);
}
} catch (SQLException e) {
connection = getDBManage().getConnection(connectInfo);
connectInfo.setConnection(connection);
}
}
} catch (SQLException e) {
log.error("get connection error", e);
}
return connection;
}
@ -135,19 +137,12 @@ public class Chat2DBContext {
* @param info
*/
public static void putContext(ConnectInfo info) {
String key = info.getKey();
ConnectInfo connectInfo = CONNECT_INFO_CACHE.getIfPresent(key);
if (connectInfo != null) {
CONNECT_INFO_THREAD_LOCAL.set(connectInfo);
}else {
DriverConfig config = info.getDriverConfig();
if (config == null) {
config = getDefaultDriverConfig(info.getDbType());
info.setDriverConfig(config);
}
CONNECT_INFO_THREAD_LOCAL.set(info);
CONNECT_INFO_CACHE.put(key, info);
DriverConfig config = info.getDriverConfig();
if (config == null) {
config = getDefaultDriverConfig(info.getDbType());
info.setDriverConfig(config);
}
CONNECT_INFO_THREAD_LOCAL.set(info);
}

/**
@ -156,25 +151,13 @@ public class Chat2DBContext {
public static void removeContext() {
ConnectInfo connectInfo = CONNECT_INFO_THREAD_LOCAL.get();
if (connectInfo != null) {
// Connection connection = connectInfo.getConnection();
// try {
// if (connection != null && !connection.isClosed()) {
// connection.close();
// }
// } catch (SQLException e) {
// log.error("close connection error", e);
// }
//
// Session session = connectInfo.getSession();
// if (session != null && session.isConnected() && connectInfo.getSsh() != null
// && connectInfo.getSsh().isUse()) {
// try {
// session.delPortForwardingL(Integer.parseInt(connectInfo.getSsh().getLocalPort()));
// } catch (JSchException e) {
// }
// }
connectInfo.close();
CONNECT_INFO_THREAD_LOCAL.remove();
}
}

public static void close() {
removeContext();
}

}
@ -16,6 +16,7 @@ import ai.chat2db.server.tools.base.excption.BusinessException;
import ai.chat2db.server.tools.common.util.EasyCollectionUtils;
import ai.chat2db.server.tools.common.util.I18nUtils;
import ai.chat2db.spi.CommandExecutor;
import ai.chat2db.spi.MetaData;
import ai.chat2db.spi.ValueHandler;
import ai.chat2db.spi.enums.DataTypeEnum;
import ai.chat2db.spi.enums.SqlTypeEnum;
@ -39,7 +40,7 @@ import org.bson.Document;
import org.springframework.util.Assert;

/**
* Dbhub unified database connection management
* Dbhub unified database connection management
*
* @author jipengfei
*/
@ -74,6 +75,7 @@ public class SQLExecutor implements CommandExecutor {
}
return null;
}

public void execute(Connection connection, String sql, ResultSetConsumer consumer) {
log.info("execute:{}", sql);
try (Statement stmt = connection.createStatement()) {
@ -113,10 +115,10 @@ public class SQLExecutor implements CommandExecutor {
List<Header> headerList = Lists.newArrayListWithExpectedSize(col);
for (int i = 1; i <= col; i++) {
headerList.add(Header.builder()
.dataType(JdbcUtils.resolveDataType(
resultSetMetaData.getColumnTypeName(i), resultSetMetaData.getColumnType(i)).getCode())
.name(ResultSetUtils.getColumnName(resultSetMetaData, i))
.build());
.dataType(JdbcUtils.resolveDataType(
resultSetMetaData.getColumnTypeName(i), resultSetMetaData.getColumnType(i)).getCode())
.name(ResultSetUtils.getColumnName(resultSetMetaData, i))
.build());
}
headerConsumer.accept(headerList);

@ -144,13 +146,13 @@ public class SQLExecutor implements CommandExecutor {
* @throws SQLException
*/
public ExecuteResult execute(final String sql, Connection connection, ValueHandler valueHandler)
throws SQLException {
throws SQLException {
return execute(sql, connection, true, null, null, valueHandler);
}

@Override
public ExecuteResult executeUpdate(String sql, Connection connection, int n)
throws SQLException {
throws SQLException {
Assert.notNull(sql, "SQL must not be null");
log.info("execute:{}", sql);
// connection.setAutoCommit(false);
@ -160,29 +162,40 @@ public class SQLExecutor implements CommandExecutor {
if (affectedRows != n) {
executeResult.setSuccess(false);
executeResult.setMessage("Update error " + sql + " update affectedRows = " + affectedRows
+ ", Each SQL statement should update no more than one record. Please use a unique key for "
+ "updates.");
+ ", Each SQL statement should update no more than one record. Please use a unique key for "
+ "updates.");
// connection.rollback();
}
}
return executeResult;
}

@Override
public List<ExecuteResult> executeSelectTable(Command command) {
MetaData metaData = Chat2DBContext.getMetaData();
String tableName = metaData.getMetaDataName(command.getDatabaseName(), command.getSchemaName(),
command.getTableName());
String sql = "select * from " + tableName;
command.setScript(sql);
return execute(command);
}

/**
* Executes the given SQL query using the provided connection.
* @param sql The SQL query to be executed.
* @param connection The database connection to use for the query.
*
* @param sql The SQL query to be executed.
* @param connection The database connection to use for the query.
* @param limitRowSize Flag to indicate if row size should be limited.
* @param offset The starting point of rows to fetch in the result set.
* @param count The number of rows to fetch from the result set.
* @param offset The starting point of rows to fetch in the result set.
* @param count The number of rows to fetch from the result set.
* @param valueHandler Handles the processing of the result set values.
* @return ExecuteResult containing the result of the execution.
* @throws SQLException If there is any SQL related error.
*/
public ExecuteResult execute(final String sql, Connection connection, boolean limitRowSize, Integer offset,
Integer count, ValueHandler valueHandler)
throws SQLException {
Integer count, ValueHandler valueHandler)
throws SQLException {
Assert.notNull(sql, "SQL must not be null");
log.info("execute:{}", sql);

@ -227,11 +240,11 @@ public class SQLExecutor implements CommandExecutor {
continue;
}
String dataType = JdbcUtils.resolveDataType(
resultSetMetaData.getColumnTypeName(i), resultSetMetaData.getColumnType(i)).getCode();
resultSetMetaData.getColumnTypeName(i), resultSetMetaData.getColumnType(i)).getCode();
headerList.add(Header.builder()
.dataType(dataType)
.name(name)
.build());
.dataType(dataType)
.name(name)
.build());
}

// Get data information
@ -271,16 +284,16 @@ public class SQLExecutor implements CommandExecutor {
if (o instanceof Document document) {
for (String string : document.keySet()) {
headerListMap.computeIfAbsent(string, k -> Header.builder()
.dataType("string")
.name(string)
.build());
.dataType("string")
.name(string)
.build());
row.put(string, Objects.toString(document.get(string)));
}
} else {
headerListMap.computeIfAbsent("_unknown", k -> Header.builder()
.dataType("string")
.name("_unknown")
.build());
.dataType("string")
.name("_unknown")
.build());
row.put("_unknown", Objects.toString(o));
}
}
@ -393,12 +406,12 @@ public class SQLExecutor implements CommandExecutor {
* @return
*/
public List<Table> tables(Connection connection, String databaseName, String schemaName, String tableName,
String types[]) {
String types[]) {

try {
DatabaseMetaData metadata = connection.getMetaData();
ResultSet resultSet = metadata.getTables(databaseName, schemaName, tableName,
types);
types);
// If connection is mysql
if ("MySQL".equalsIgnoreCase(metadata.getDatabaseProductName())) {
// Get the comment of mysql table
@ -438,10 +451,10 @@ public class SQLExecutor implements CommandExecutor {
* @return
*/
public List<TableColumn> columns(Connection connection, String databaseName, String schemaName, String
tableName,
String columnName) {
tableName,
String columnName) {
try (ResultSet resultSet = connection.getMetaData().getColumns(databaseName, schemaName, tableName,
columnName)) {
columnName)) {
return ResultSetUtils.toObjectList(resultSet, TableColumn.class);
} catch (Exception e) {
throw new RuntimeException(e);
@ -460,22 +473,22 @@ public class SQLExecutor implements CommandExecutor {
public List<TableIndex> indexes(Connection connection, String databaseName, String schemaName, String tableName) {
List<TableIndex> tableIndices = Lists.newArrayList();
try (ResultSet resultSet = connection.getMetaData().getIndexInfo(databaseName, schemaName, tableName,
false,
false)) {
false,
false)) {
List<TableIndexColumn> tableIndexColumns = ResultSetUtils.toObjectList(resultSet, TableIndexColumn.class);
tableIndexColumns.stream().filter(c -> c.getIndexName() != null).collect(
Collectors.groupingBy(TableIndexColumn::getIndexName)).entrySet()
.stream().forEach(entry -> {
TableIndex tableIndex = new TableIndex();
TableIndexColumn column = entry.getValue().get(0);
tableIndex.setName(entry.getKey());
tableIndex.setTableName(column.getTableName());
tableIndex.setSchemaName(column.getSchemaName());
tableIndex.setDatabaseName(column.getDatabaseName());
tableIndex.setUnique(!column.getNonUnique());
tableIndex.setColumnList(entry.getValue());
tableIndices.add(tableIndex);
});
Collectors.groupingBy(TableIndexColumn::getIndexName)).entrySet()
.stream().forEach(entry -> {
TableIndex tableIndex = new TableIndex();
TableIndexColumn column = entry.getValue().get(0);
tableIndex.setName(entry.getKey());
tableIndex.setTableName(column.getTableName());
tableIndex.setSchemaName(column.getSchemaName());
tableIndex.setDatabaseName(column.getDatabaseName());
tableIndex.setUnique(!column.getNonUnique());
tableIndex.setColumnList(entry.getValue());
tableIndices.add(tableIndex);
});
} catch (SQLException e) {
throw new RuntimeException(e);
}
@ -491,7 +504,7 @@ public class SQLExecutor implements CommandExecutor {
* @return List<Function>
*/
public List<ai.chat2db.spi.model.Function> functions(Connection connection, String databaseName,
String schemaName) {
String schemaName) {
try (ResultSet resultSet = connection.getMetaData().getFunctions(databaseName, schemaName, null);) {
return ResultSetUtils.toObjectList(resultSet, ai.chat2db.spi.model.Function.class);
} catch (Exception e) {