Commit a81d61db by wuchao

Change configuration files

parent 04ad88a4
...@@ -1174,6 +1174,8 @@
                 <artifactId>lombok</artifactId>
             </exclude>
         </excludes>
+        <!-- Key setting: package system-scope dependencies into the jar -->
+        <includeSystemScope>true</includeSystemScope>
     </configuration>
 </plugin>
 </plugins>
......
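
Note on the pom.xml hunk above: includeSystemScope is a standard option of the spring-boot-maven-plugin repackage goal. By default, dependencies declared with <scope>system</scope> are omitted from the repackaged executable jar; this flag packages them in. A minimal sketch of the kind of dependency the flag affects (the coordinates and systemPath below are hypothetical, not from this repository):

    <!-- Hypothetical system-scope dependency, resolved from a local path rather than a repository.
         Without includeSystemScope=true in the spring-boot-maven-plugin configuration,
         this jar would be missing from the repackaged boot jar at runtime. -->
    <dependency>
        <groupId>com.example</groupId>
        <artifactId>vendor-sdk</artifactId>
        <version>1.0</version>
        <scope>system</scope>
        <systemPath>${project.basedir}/lib/vendor-sdk-1.0.jar</systemPath>
    </dependency>
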
...@@ -578,9 +578,9 @@ public class EncryptionConfigController {
 configContent.append("\r\n");
 configContent.append("#gateway config");
 configContent.append("\r\n");
-configContent.append("gateway.protocol=https");
+configContent.append("gateway.protocol=http"); // temporarily changed to http, here and in the gateway.host line below
 configContent.append("\r\n");
-configContent.append("gateway.host=" + gatewayUrl.replace("https://", "").replace(":", "").replace(gatewayPort, ""));
+configContent.append("gateway.host=" + gatewayUrl.replace("http://", "").replace(":", "").replace(gatewayPort, ""));
 configContent.append("\r\n");
 configContent.append("gateway.port=" + gatewayPort);
 configContent.append("\r\n");
......
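
Note on the gateway.host line above: the expression strips the scheme, then every colon, then the port digits, in that order, so it only behaves correctly when gatewayUrl actually starts with the scheme being replaced; the commit keeps the stripped scheme in step with the protocol now being written. A standalone sketch with hypothetical sample values:

    // Sketch of the gateway.host derivation; the URL and port values are made up.
    public class GatewayHostSketch {
        public static void main(String[] args) {
            String gatewayUrl = "http://10.0.0.5:8443"; // hypothetical
            String gatewayPort = "8443";                // hypothetical
            // Same chain as the patched line: drop the scheme, every colon, then the port digits.
            String host = gatewayUrl.replace("http://", "").replace(":", "").replace(gatewayPort, "");
            System.out.println(host); // prints 10.0.0.5
        }
    }

Caveat: if the scheme does not match (e.g. an https URL) or the port digits also occur inside the address itself, this naive replace chain mangles the host.
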
...@@ -1008,23 +1008,45 @@ public class SearchSensitiveDataController {
 String schemaname = map.get("schemaname");
 String dbType = map.get("dbType");
 String dataSystemId = map.get("dataSystemId");
-List<ComboboxVO> result = new ArrayList<>();
+ArrayList<ComboboxVO> result = new ArrayList<ComboboxVO>();
 try {
-    return RespHelper.successResp(result);
-} catch (Exception e) {
-    return RespHelper.successResp(result);
-} finally{
-    List<ComboboxVO> queryresult = new ArrayList<>();
+    if (dbType.equals("LOCALFILE")) {
+        TCoreDatasystem ta = this.dataSystemService.queryDataSystemById(dataSystemId);
+        List<File> filelist = new ArrayList();
+        Exception exception = null;
+        switch (ta.getDbtype()) {
+            case "LOCALFILE": {
+                filelist = FileTool.getFileList((String)ta.getLocaldir());
+                break;
+            }
+            default: {
+                result.add(new ComboboxVO("", ""));
+            }
+        }
+        for (File file : filelist) {
+            result.add(new ComboboxVO(file.toString(), file.toString()));
+        }
+    } else {
+        List list = this.dataSystemService.queryShemas(dbType, dataSystemId);
+        if (list.size() != 0) {
+            result.addAll(list);
+        }
+    }
+}
+catch (Exception e) {
+    e.printStackTrace();
+}
+finally {
+    ArrayList<ComboboxVO> queryresult = new ArrayList<ComboboxVO>();
     if (schemaname != null && !schemaname.equals("")) {
         ComboboxVO schemaValue = null;
-        for (int i = 0; i < result.size(); i++) {
-            schemaValue = result.get(i);
-            if (schemaValue.getValue().toLowerCase().contains(schemaname.toLowerCase())) {
+        for (int i = 0; i < result.size(); ++i) {
+            schemaValue = (ComboboxVO)result.get(i);
+            if (!schemaValue.getValue().toLowerCase().contains(schemaname.toLowerCase())) continue;
             queryresult.add(schemaValue);
-            }
         }
         return RespHelper.successResp(queryresult);
     }
+    return RespHelper.successResp(result);
 }
 }
......
...@@ -1409,137 +1409,133 @@ public class TCoreEncryptionController {
 }
 private Map<String, Object> doLoadingAllTable(List<LoadingParameterVO> LoadingParameterList) {
-    Map<String, Object> resultMap = new HashMap();
+    HashMap<String, Object> resultMap = new HashMap<String, Object>();
     try {
         int taskCount = 0;
-        for(LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
             taskCount += loadingParameterVO.getTableInfo().size();
         }
-        for(LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
             String projectId = loadingParameterVO.getProjectId();
             String dataSystemId = loadingParameterVO.getDataSystemId();
             Connection connection = loadingParameterVO.getConnection();
             TCoreDatasystem tCoreDatasystem = loadingParameterVO.gettCoreDatasystem();
             String schema = loadingParameterVO.getSchema();
-            for(TableInfoVO tableInfoVO : loadingParameterVO.getTableInfo()) {
+            List<TableInfoVO> tableInfo = loadingParameterVO.getTableInfo();
+            for (TableInfoVO tableInfoVO : tableInfo) {
                 EncryptionConfigVO vo = new EncryptionConfigVO();
                 vo.setDatasystem_id(dataSystemId);
                 vo.setSchema(schema);
                 vo.setTable_name(tableInfoVO.getTableName());
                 vo.setProjectid(projectId);
-                List<ColumnInfoVO> allColumnList = this.encryptionConfigService.queryOriginalList(vo);
-                List<ColumnPrimaryKeyInfo> columnPrimaryKeyInfoList = this.columnPrimaryKeyInfoDao.queryAll(new Criterion[]{Restrictions.eq("project_id", loadingParameterVO.getProjectId()), Restrictions.eq("datasystem_id", loadingParameterVO.getDataSystemId()), Restrictions.eq("tschema", loadingParameterVO.getSchema()), Restrictions.eq("tname", tableInfoVO.getTableName())});
-                List<ColumnInfoVO> allColumns = this.maksingAppDataRuleService.queryColumnInfo(dataSystemId, schema, tableInfoVO.getTableName());
+                List allColumnList = this.encryptionConfigService.queryOriginalList(vo);
+                List columnPrimaryKeyInfoList = this.columnPrimaryKeyInfoDao.queryAll(new Criterion[]{Restrictions.eq((String)"project_id", (Object)loadingParameterVO.getProjectId()), Restrictions.eq((String)"datasystem_id", (Object)loadingParameterVO.getDataSystemId()), Restrictions.eq((String)"tschema", (Object)loadingParameterVO.getSchema()), Restrictions.eq((String)"tname", (Object)tableInfoVO.getTableName())});
+                List allColumns = this.maksingAppDataRuleService.queryColumnInfo(dataSystemId, schema, tableInfoVO.getTableName());
                 if ("MONGODB".equals(tCoreDatasystem.getDbtype())) {
-                    this.taskExecutor.submit(new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
-                } else if ("ES".equals(tCoreDatasystem.getDbtype())) {
-                    this.taskExecutor.submit(new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false, "0", (CountDownLatch)null));
-                } else if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
+                    this.taskExecutor.submit((Runnable)new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                    continue;
+                }
+                if ("ES".equals(tCoreDatasystem.getDbtype())) {
+                    this.taskExecutor.submit((Runnable)new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false, "0", null));
+                    continue;
+                }
+                if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
                     TableRuleInfo hiveStoreInfo = this.tableRuleService.getHiveStoreInfo(projectId, dataSystemId, schema, tableInfoVO.getTableName());
                     if (hiveStoreInfo != null && hiveStoreInfo.getType() == HiveEncDecType.webHdfs) {
-                        this.taskExecutor.submit(new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
-                    } else {
-                        this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                        this.taskExecutor.submit((Runnable)new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                        continue;
                     }
-                } else if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
-                    this.taskExecutor.submit(new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
-                } else {
-                    this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                    this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                    continue;
                 }
+                if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
+                    this.taskExecutor.submit((Runnable)new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                    continue;
+                }
+                this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
             }
         }
-    } catch (Exception e) {
+    }
+    catch (Exception e) {
         e.printStackTrace();
-    } finally {
-        Iterator var20 = LoadingParameterList.iterator();
-        while(true) {
-            if (!var20.hasNext()) {
-                ;
-            } else {
-                LoadingParameterVO loadingParameterVO = (LoadingParameterVO)var20.next();
-                if (null != loadingParameterVO.getConnection()) {
-                    try {
-                        loadingParameterVO.getConnection().close();
-                    } catch (SQLException e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        }
     }
+    finally {
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+            if (null == loadingParameterVO.getConnection()) continue;
+            try {
+                loadingParameterVO.getConnection().close();
+            }
+            catch (SQLException e) {
+                e.printStackTrace();
+            }
+        }
+    }
     return resultMap;
 }
 private Map<String, Object> doReloadingAllTable(List<LoadingParameterVO> LoadingParameterList) {
-    Map<String, Object> resultMap = new HashMap();
+    HashMap<String, Object> resultMap = new HashMap<String, Object>();
     try {
         int taskCount = 0;
-        for(LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
             taskCount += loadingParameterVO.getTableInfo().size();
         }
-        for(LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
             String projectId = loadingParameterVO.getProjectId();
             String dataSystemId = loadingParameterVO.getDataSystemId();
             Connection connection = loadingParameterVO.getConnection();
             TCoreDatasystem tCoreDatasystem = loadingParameterVO.gettCoreDatasystem();
             String schema = loadingParameterVO.getSchema();
-            for(TableInfoVO tableInfoVO : loadingParameterVO.getTableInfo()) {
+            List<TableInfoVO> tableInfo = loadingParameterVO.getTableInfo();
+            for (TableInfoVO tableInfoVO : tableInfo) {
                 EncryptionConfigVO vo = new EncryptionConfigVO();
                 vo.setDatasystem_id(dataSystemId);
                 vo.setSchema(schema);
                 vo.setTable_name(tableInfoVO.getTableName());
                 vo.setProjectid(projectId);
-                List<ColumnInfoVO> allColumnList = this.encryptionConfigService.queryOriginalList(vo);
-                List<ColumnPrimaryKeyInfo> columnPrimaryKeyInfoList = this.columnPrimaryKeyInfoDao.queryAll(new Criterion[]{Restrictions.eq("project_id", loadingParameterVO.getProjectId()), Restrictions.eq("datasystem_id", loadingParameterVO.getDataSystemId()), Restrictions.eq("tschema", loadingParameterVO.getSchema()), Restrictions.eq("tname", tableInfoVO.getTableName())});
-                List<ColumnInfoVO> allColumns = this.maksingAppDataRuleService.queryColumnInfo(dataSystemId, schema, tableInfoVO.getTableName());
+                List allColumnList = this.encryptionConfigService.queryOriginalList(vo);
+                List columnPrimaryKeyInfoList = this.columnPrimaryKeyInfoDao.queryAll(new Criterion[]{Restrictions.eq((String)"project_id", (Object)loadingParameterVO.getProjectId()), Restrictions.eq((String)"datasystem_id", (Object)loadingParameterVO.getDataSystemId()), Restrictions.eq((String)"tschema", (Object)loadingParameterVO.getSchema()), Restrictions.eq((String)"tname", (Object)tableInfoVO.getTableName())});
+                List allColumns = this.maksingAppDataRuleService.queryColumnInfo(dataSystemId, schema, tableInfoVO.getTableName());
                 if ("MONGODB".equals(tCoreDatasystem.getDbtype())) {
-                    this.taskExecutor.submit(new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
-                } else if ("ES".equals(tCoreDatasystem.getDbtype())) {
-                    this.taskExecutor.submit(new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true, "0", (CountDownLatch)null));
-                } else if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
+                    this.taskExecutor.submit((Runnable)new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
+                    continue;
+                }
+                if ("ES".equals(tCoreDatasystem.getDbtype())) {
+                    this.taskExecutor.submit((Runnable)new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true, "0", null));
+                    continue;
+                }
+                if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
                     TableRuleInfo hiveStoreInfo = this.tableRuleService.getHiveStoreInfo(projectId, dataSystemId, schema, tableInfoVO.getTableName());
                     if (hiveStoreInfo != null && hiveStoreInfo.getType() == HiveEncDecType.webHdfs) {
-                        this.taskExecutor.submit(new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
-                    } else {
-                        this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
+                        this.taskExecutor.submit((Runnable)new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
+                        continue;
                     }
-                } else if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
-                    this.taskExecutor.submit(new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
-                } else {
-                    this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
+                    this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
+                    continue;
                 }
+                if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
+                    this.taskExecutor.submit((Runnable)new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
+                    continue;
+                }
+                this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
             }
         }
-    } catch (Exception e) {
+    }
+    catch (Exception e) {
         e.printStackTrace();
-    } finally {
-        Iterator var20 = LoadingParameterList.iterator();
-        while(true) {
-            if (!var20.hasNext()) {
-                ;
-            } else {
-                LoadingParameterVO loadingParameterVO = (LoadingParameterVO)var20.next();
-                if (null != loadingParameterVO.getConnection()) {
-                    try {
-                        loadingParameterVO.getConnection().close();
-                    } catch (SQLException e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        }
     }
+    finally {
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+            if (null == loadingParameterVO.getConnection()) continue;
+            try {
+                loadingParameterVO.getConnection().close();
+            }
+            catch (SQLException e) {
+                e.printStackTrace();
+            }
+        }
+    }
     return resultMap;
 }
 private Map<String, Object> doLoadingBatchTable(List<LoadingParameterVO> LoadingParameterList) {
...@@ -2512,25 +2508,18 @@ public class TCoreEncryptionController {
     } catch (Exception e) {
         e.printStackTrace();
     } finally {
-        Iterator var28 = LoadingParameterList.iterator();
-        while(true) {
-            if (!var28.hasNext()) {
-                ;
-            } else {
-                LoadingParameterVO loadingParameterVO = (LoadingParameterVO)var28.next();
-                if (null != loadingParameterVO.getConnection()) {
-                    try {
-                        loadingParameterVO.getConnection().close();
-                    } catch (SQLException e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        }
+        for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
+            if (null == loadingParameterVO.getConnection()) continue;
+            try {
+                loadingParameterVO.getConnection().close();
+            }
+            catch (SQLException e) {
+                e.printStackTrace();
+            }
+        }
     }
     return resultMap;
 }
 private boolean isTableInSchema(Connection conn, String schema, String table) throws SQLException {
......
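
All three rewritten finally blocks above converge on the same close-all loop. A hedged sketch of how that duplicated cleanup could be factored out (the helper below is illustrative, not part of the commit; it assumes the project's LoadingParameterVO type):

    // Illustrative helper, not in the source: close every connection held by the list,
    // swallowing per-connection SQLExceptions exactly as the diff's finally blocks do.
    static void closeAll(List<LoadingParameterVO> loadingParameterList) {
        for (LoadingParameterVO vo : loadingParameterList) {
            Connection conn = vo.getConnection();
            if (conn == null) continue;
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
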
...@@ -1086,7 +1086,7 @@ public class TdataProjectServiceImpl implements TdataProjectService {
         this.tCoreProjectFindruleDao.save(obj);
     }
-    this.tCoreDataProjectDao.save(project);
+    this.tCoreDataProjectDao.update(project);
 }
 public String queryEdtionIdByProId(String projectid) {
......
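
The save-to-update switch above matters under Hibernate semantics (an assumption based on the Criterion/Restrictions usage elsewhere in this commit): save() schedules an INSERT for a transient instance, while update() reattaches a detached instance and schedules an UPDATE of the existing row. A schematic sketch of the distinction (the TCoreDataProject entity name is inferred from the DAO name):

    // session.save(project)   -> INSERT: treats project as a brand-new row.
    // session.update(project) -> UPDATE: writes project's state over the existing row
    //                            with the same identifier, instead of inserting a duplicate.
    void persistEdit(org.hibernate.Session session, TCoreDataProject project) {
        session.update(project); // editing an existing project must not insert a copy
    }
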
...@@ -2,11 +2,11 @@
 #IP information
 #jdbc.host=192.168.2.127
 #Production environment
-jdbc.host=127.0.0.1
+jdbc.host=demo.docmis.cn
 #jdbc.host=192.168.2.228
 #Port information
 #jdbc.port=54321
-jdbc.port=3306
+jdbc.port=23500
 #Database instance
 jdbc.kingbase.dbname=security
 #jdbc.dbname=encryption_gateway
...@@ -14,7 +14,8 @@ jdbc.kingbase.dbname=security
 #If the configured database type is H2, jdbc.dbname is public
 jdbc.dbname=public
 #jdbc.dbname=encryption
-basePath=/home/trustz
+#basePath=/home/trustz
+basePath=D:/trustz
 maskingPath=/home/masking
 #jdbc-mysql
 #jdbc.driver=com.mysql.jdbc.Driver
...@@ -167,7 +168,7 @@ loginfilepath = D:/login.properties
 app_alert_interface = http://127.0.0.1:8180/core/applog/sendAlertInfo
 db_alert_interface = http://127.0.0.1:8180/core/dblog/sendAlertInfo
 # Mail recipient
-mailTo = shuyi666@aliyun.com
+mailTo = dhdhhd@aliyun.com
 # Alert end
 # Audit log begin
......
[Data persistence configuration]
#IP information
#jdbc.host=192.168.2.127
#Production environment
jdbc.host=demo.docmis.cn
#jdbc.host=192.168.2.228
#Port information
#jdbc.port=54321
jdbc.port=23500
#Database instance
jdbc.kingbase.dbname=security
#jdbc.dbname=encryption_gateway
#jdbc.dbname=encryption_king
#If the configured database type is H2, jdbc.dbname is public
jdbc.dbname=public
#jdbc.dbname=encryption
#basePath=/home/trustz
basePath=/home/nse-server/trustz
maskingPath=/home/masking
#jdbc-mysql
#jdbc.driver=com.mysql.jdbc.Driver
jdbc.driver= com.mysql.cj.jdbc.Driver
#jdbc.driver=dm.jdbc.driver.DmDriver
#jdbc.driver=org.h2.Driver
#jdbc-kingbase8
#jdbc.driver: com.kingbase8.Driver
#Production environment
#url-kingbase8
#jdbc.url=jdbc:kingbase8://${jdbc.host}:${jdbc.port}/${jdbc.kingbase.dbname}?currentSchema=${jdbc.dbname}
jdbc.url=jdbc:mysql://${jdbc.host}:${jdbc.port}/${jdbc.dbname}?useSSL=false&useUnicode=true&characterEncoding=UTF-8&serverTimezone=GMT%2B8
#jdbc.url=jdbc:dm://${jdbc.host}:${jdbc.port}?SCHEMA=${jdbc.dbname}&columnNameUpperCase=false&genKeyNameCase=2
#H2 database
#jdbc.url=jdbc:h2:d:/db/enc;AUTO_SERVER=TRUE
#jdbc.url=jdbc:h2:/home/trustz/data/db/enc;AUTO_SERVER=TRUE
#jdbc.user=sa
#jdbc.password=sa
#jdbc.user=SYSDBA
#jdbc.password=SYSDBA
jdbc.user=root
jdbc.password=!QAZ2wsx#EDC2022
#jdbc.user=system
#jdbc.password=system
jdbc.dialect=org.hibernate.dialect.MySQL5Dialect
#jdbc.dialect=org.hibernate.dialect.DmDialect
#jdbc.dialect=org.hibernate.dialect.H2Dialect
[Global parameters]
#License storage path
licensedir=${basePath}/data/infa_file/lic
#licensedir=D:/lic
#Whether a licenseKey is required (0: no verification, 1: verification required)
isNeedLicenseKey=0
#Allowed number of failed login attempts
login_limit_num=5
#Platform jar deployment path
deployPath=${basePath}/data
#System version number
sys_version=v2.9
[Log properties]
#Log file path
logpath = ./loginfo
#logpath = ${basePath}/data/loginfo
#Discovery process log path
tasklogger = ${maskingPath}/app/tasklogger/
#Discovery result export path
discoveryResultDir=${basePath}/data/infa_project/export/
#Discovery task version paths
#TaskVersion=D:\\version\\TaskVersion\\
#ProjectVersion=D:\\version\\ProjectVersion\\
#AllVersion=D:\\version\\AllVersion\\
TaskVersion=${basePath}/data/infa_project/TaskVersion/
ProjectVersion=${basePath}/data/infa_project/ProjectVersion/
AllVersion=${basePath}/data/infa_project/AllVersion/
CustomVersion=${basePath}/data/infa_project/CustomVersion/
#Backup SQL path used for rollback
ResetResult=${basePath}/data/infa_project/ResetResult/
#Discovery task monitoring path
#monitorFile=D:\\work\\ghca 2.0\\file\\
monitorFile=${basePath}/data/infa_project/monitorFile/
#Discovery task: whether the result page shows the match rate (0 = hide, 1 = show)
isRate=1
#Discovery task: number of match-rate entries shown on the result page
rateNum=10
#Discovery task: storage path for matched and unmatched files
#rateDir=D:\\work\\ghca 2.0\\rate\\
rateDir=${basePath}/data/infa_project/rate/
#Discovery task: correlation algorithm shows confidence above 60%
confidenceRate=0.6
#Discovery task: local download path for unstructured file image discovery (FTP/SFTP)
pictureDir=${basePath}/data/infa_project/pic/
#Discovery task: rows fetched per query
readCntByOneTime=10000
#Discovery task: rows inserted per JDBC batch
jdbc_insert_batche_size=10000
#Duplicate-data ratio
isrepeatvalue=0.1
#excelModelDown=D:\\masking\\
#Data domain - discovery rule import template
#dataAreaExportTemplate=E:/
dataAreaExportTemplate=${basePath}/data/infa_file/dataAreaExportTemplate/
[OTHERS]
#Name of the database to export
#jdbc.exportDatabaseName=encryption_test
jdbc.exportDatabaseName=${jdbc.dbname}
#Path of the database dump to import
jdbc.importPath=${basePath}/data/importMysqlSql/backup.sql
#Path of the MySQL bin directory
#MysqlPath=C:\\Program Files (x86)\\MySQL\\MySQL Server 5.5\\bin\\
MysqlPath=/usr/bin/
#MysqlPath=C:\\Program Files (x86)\\Mysql\\mysql-8.0.23-winx64\\bin\\
#Storage path for the exported database dump
jdbc.exportPath=${basePath}/data/exportMysqlSql/backup.sql
#jdbc.exportPath=D:/ghca/data/exportMysqlSql/backup.sql
# Target database to import into
jdbc.importDatabaseName=${jdbc.dbname}
#jdbcproxypath = E:\\IdeaProjects\\p6-encryption-pom\\target\\encryption-proxy-jar-with-dependencies.jar
#jdbcproxypath = C:\\work\\repo\\com\\ghca\\encryption-proxy\\1.0\\encryption-proxy-1.0-jar-with-dependencies.jar
jdbcproxypath = ${basePath}/data/encryption/encryption-proxy.jar
# UDF directory
udfproxypath = ${basePath}/data/udf/
# SQL Server UDF file location
sqlserverudfpath = ${basePath}/data/udf/sqlserver-udf.dll
odbcproxypath = ${basePath}/data/encryption/masking_odbc.zip
jdbcmaskingfilename = encryption.properties
desensitizationStrategyfilename = masking.properties
#Storage path for database connection files
databaselink = ${basePath}/data
#exceptionSqlPath = D:/exceptionSql/
exceptionSqlPath = ${basePath}/data/exceptionSql/
# Polling self-check download file path
#exceptionKmLogPath = D:/kmlog/
#Polling self-check: download file location
exceptionKmLogPath = ${basePath}/data/kmlog/
logDetail = logDetail.log
#HSM SDKKeyId index, used only for internal SM2 encryption; not needed for SM4
#SDKKeyId 110 is the key index; SDKPassword "pass" is the private-key authorization code
SDKKeyId = 110
#HSM password, used only for internal SM2 encryption; not needed for SM4
SDKPassword = pass
SDKPath = ${basePath}/data/HsmConfig.properties
#SDKPath = E:/HsmConfig/HsmConfig.properties
copySuffix = _copy
initialSuffix = _initial
cipherSuffix = _cipher
digestSuffix = _enc
checkNum = 100
corePoolSize = 4
maxPoolSize = 4
#Maximum thread-pool queue size
queueCapacity = 4
#Maximum OceanBase commit batch size
oceanbasecount = 3
encbatchcount = 10000
#ES wait time
esWaitTime = 2000
loginfilepath = D:/login.properties
# Alert begin
app_alert_interface = http://127.0.0.1:8180/core/applog/sendAlertInfo
db_alert_interface = http://127.0.0.1:8180/core/dblog/sendAlertInfo
# Mail recipient
mailTo = abcld@aliyun.com
# Alert end
# Audit log begin
# App audit log storage path / app rule-match log storage path
#appserverlogpath = E:/app/
appserverlogpath = ${maskingPath}/app/appdata/
#Audit log to syslog
#syslog4auditserverlogpath = E:/s4a
syslog4auditserverlogpath = ${maskingPath}/s4a/s4adata/
# DB access log storage path
#dbaccesslogpath = E:/syslog/access/
dbaccesslogpath = ${maskingPath}/app/syslog/access/
# DB rule-match log storage path
#dbrulelogpath = E:/syslog/rule/
dbrulelogpath = ${maskingPath}/app/syslog/rule/
# Statement types
auditlog_type = SELECT,UPDATE,DELETE
# App audit service address
applogServerHost = 127.0.0.1
applogServerPort = 32376
# DB audit service address
dblogServerHost = 127.0.0.1
dblogServerPort = 40000
#Audit log to syslog
s4aServerHost = 127.0.0.1
s4aServerPost = 40000
# Audit log end
# Data source character encodings
oraclecharset = AL32UTF8,ZHS16GBK
mysqlcharset = UTF8,GBK
postgresqlcharset = UTF8,GBK
hivecharset = UTF8,BINARY
mssqlservercharset = UTF8,GBK
db2charset = Unicode,UCS-2,UTF16,UTF8
mariadbcharset = UTF8,BINARY
informixcharset = UTF8,819
dmcharset = GB18030,UTF8,EUC-KR
sybasecharset = UTF8,GBK
sybaseiqcharset = UTF8,GBK
gbasecharset = UTF8,GBK
kingbasecharset = UTF8,GBK
oscarcharset = UTF8,GBK
uxdbcharset = UTF8,GBK
greenplumcharset = UTF8,GBK
teradatacharset = UTF8,GBK
highgocharset = UTF8,GBK
cachecharset = UTF8,GBK
kakfacharset = UTF8,GBK
KAFKAcharset = UTF8,GBK
hadoopcharset = UTF8,GBK
hbasecharset = UTF8,GBK
gausscharset = UTF8,GBK
hive_tdhcharset = UTF8,GBK
mongodbcharset = UTF8,GBK
escharset = UTF8,GBK
oceanbasecharset = UTF8,GBK
gaussdbcharset = UTF8,GBK
#Per-character LIKE encryption switch: 1 = encrypt character by character, 0 = encrypt the string as a whole. Off (0) by default.
encryption.like.open = 1
#Separator used to join individually encrypted characters
encryption.like.split = #
#Encryption label prefix
encryption.label = jm_
#Encryption label suffix
encryption.label.suffix = _jm
#Encryption label for ES
encryption.es.label = jmjzrjjm
#Config-database type for all config-store operations: 0 = MySQL (default), 1 = KingbaseES (人大金仓), 2 = Dameng, 3 = H2
peizhikudbtype = 0
#Whether to record operation logs
isSaveServerClientLog=false
#Encryption card key configuration: the x and y components can be fetched each time via the export-public-key interface; M is the hash of the key plaintext, C is the key ciphertext, L is the vector length (16 bytes)
ECCCipher.bits=0L
ECCCipher.x=0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -83, 42, -40, -13, -22, -108, 70, 42, 123, 40, 88, -37, 76, 90, 105, 55, 46, -59, -3, -43, 71, -120, 104, -84, 42, -77, 94, -112, 45, -93, 58, -45
ECCCipher.y=0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -65, 18, -33, -28, -99, 39, -126, 89, 37, -15, -82, -46, -25, 40, 2, -44, 71, 102, 30, 113, -63, -78, -89, -55, -52, 38, 61, -109, 4, -122, 38, -30
ECCCipher.M=0, 27, 28, 35, 90, -3, -128, 76, -117, -31, 83, 84, 46, 61, -97, -92, 120, -19, 46, -121, -27, -107, -58, 90, 118, 0, 6, 105, 68, -41, 96, 30
ECCCipher.C=-18, 92, 111, -108, 103, -78, 28, 59, 67, -5, -101, -38, 40, 115, -72, 71, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
ECCCipher.L=16
#Password for acquiring the encryption card handle
Ccore.PhKeyHandle.password=11111111
#Encryption verification scheduled-task interval
digestperiod=5
#Encryption verification key
digestkey=ghca
#Maximum length of the copy columns generated by encryption/decryption
encryption.field.max=10000
#MongoDB primary key
encryption.mongodbpk=_id
#Schema used when building Dameng SQL
dmschema=encryption
#Import/export format: 0 = Excel, 1 = encrypted txt
is_enc_backupfile=1
#Key for the encrypted txt
is_enc_backupfile_key=6C603060274AED6FFB93EDB40E23E173
#0 = pull encryption config for the current project only, 1 = pull config for all projects on the platform
is_pull_all=0
#Path for saving old logs
oldlogpath=${basePath}/data/systemlog
#System console log path
commandpath=${basePath}/data/commandlog
#JSON encryption label
jsonkey=jsonkey
#三未 JCE interface configuration file
swsdsPath=${basePath}/jdk_dev
#Whether historical-data encryption is required (0 = historical data need not be encrypted, 1 = historical data must be encrypted)
is_encrypted=1
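
Many values in this file (licensedir, deployPath, jdbc.url, and others) reference other keys through ${...} placeholders, which plain java.util.Properties does not expand. A minimal sketch of resolving that nesting with Spring's PropertyPlaceholderHelper (how this project actually loads the file is an assumption; the file name below is a placeholder):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;
    import org.springframework.util.PropertyPlaceholderHelper;

    public class PlaceholderSketch {
        public static void main(String[] args) throws IOException {
            // Load the raw properties, then expand ${key} references between entries.
            Properties props = new Properties();
            try (FileInputStream in = new FileInputStream("config.properties")) {
                props.load(in); // non-ASCII comments must be \uXXXX-escaped in .properties files
            }
            PropertyPlaceholderHelper helper = new PropertyPlaceholderHelper("${", "}");
            // e.g. deployPath=${basePath}/data with basePath=/home/nse-server/trustz
            String deployPath = helper.replacePlaceholders(props.getProperty("deployPath"), props);
            System.out.println(deployPath); // -> /home/nse-server/trustz/data
        }
    }
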