Commit 215efb32 by 周海峰

Merge branch 'master' of https://code.palacesun.com/wuchao/Nse

parents 9627a4f5 c477d22c
......@@ -1119,7 +1119,11 @@
<scope>system</scope>
<systemPath>${project.basedir}/src/main/resources/bin/swxajce-1.0.jar</systemPath>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.2</version>
</dependency>
</dependencies>
<build>
......@@ -1149,6 +1153,8 @@
<artifactId>lombok</artifactId>
</exclude>
</excludes>
<!-- Key setting: package system-scope dependencies into the executable jar -->
<includeSystemScope>true</includeSystemScope>
</configuration>
</plugin>
</plugins>
......
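The new xercesImpl dependency supplies a JAXP-compliant DOM parser at runtime. A minimal smoke-test sketch of how it gets picked up through the standard JAXP factory lookup; the XML payload and class name are illustrative, not part of this commit:

```java
import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;

public class XercesSmokeTest {
    public static void main(String[] args) throws Exception {
        // With xercesImpl on the classpath, the JAXP service lookup returns
        // org.apache.xerces.jaxp.DocumentBuilderFactoryImpl.
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Commonly recommended hardening when the XML may be untrusted.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        Document doc = factory.newDocumentBuilder()
                .parse(new InputSource(new StringReader("<config enabled=\"true\"/>")));
        System.out.println("root = " + doc.getDocumentElement().getTagName());
        System.out.println("factory = " + factory.getClass().getName());
    }
}
```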
......@@ -578,9 +578,9 @@ public class EncryptionConfigController {
configContent.append("\r\n");
configContent.append("#gateway config");
configContent.append("\r\n");
configContent.append("gateway.protocol=https");
configContent.append("gateway.protocol=http"); // 这里和下面第二行暂时改成 http
configContent.append("\r\n");
configContent.append("gateway.host=" + gatewayUrl.replace("https://", "").replace(":", "").replace(gatewayPort, ""));
configContent.append("gateway.host=" + gatewayUrl.replace("http://", "").replace(":", "").replace(gatewayPort, ""));
configContent.append("\r\n");
configContent.append("gateway.port=" + gatewayPort);
configContent.append("\r\n");
......
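The host is extracted by chaining String.replace calls on gatewayUrl, which breaks quietly if the port digits occur elsewhere in the URL or a path is appended. A hedged sketch of the same extraction done with java.net.URI; the helper name and sample URL are illustrative, not from this commit:

```java
import java.net.URI;

public class GatewayHostSketch {
    // Illustrative replacement for the replace-chain above.
    static String hostOf(String gatewayUrl) {
        return URI.create(gatewayUrl).getHost();
    }

    public static void main(String[] args) {
        String gatewayUrl = "http://demo.docmis.cn:9005"; // sample value
        System.out.println("gateway.host=" + hostOf(gatewayUrl));
        System.out.println("gateway.port=" + URI.create(gatewayUrl).getPort());
    }
}
```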
......@@ -1008,23 +1008,45 @@ public class SearchSensitiveDataController {
String schemaname = map.get("schemaname");
String dbType = map.get("dbType");
String dataSystemId = map.get("dataSystemId");
List<ComboboxVO> result = new ArrayList<>();
try {
if (dbType.equals("LOCALFILE")) {
TCoreDatasystem ta = this.dataSystemService.queryDataSystemById(dataSystemId);
List<File> filelist = new ArrayList<>();
switch (ta.getDbtype()) {
case "LOCALFILE": {
filelist = FileTool.getFileList(ta.getLocaldir());
break;
}
default: {
result.add(new ComboboxVO("", ""));
}
}
for (File file : filelist) {
result.add(new ComboboxVO(file.toString(), file.toString()));
}
} else {
List list = this.dataSystemService.queryShemas(dbType, dataSystemId);
if (list.size() != 0) {
result.addAll(list);
}
}
}
catch (Exception e) {
e.printStackTrace();
}
finally {
List<ComboboxVO> queryresult = new ArrayList<>();
if (schemaname != null && !schemaname.equals("")) {
ComboboxVO schemaValue = null;
for (int i = 0; i < result.size(); i++) {
schemaValue = result.get(i);
if (schemaValue.getValue().toLowerCase().contains(schemaname.toLowerCase())) {
queryresult.add(schemaValue);
}
}
return RespHelper.successResp(queryresult);
}
return RespHelper.successResp(result);
}
}
......
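The new LOCALFILE branch relies on the project-internal FileTool.getFileList. A sketch of the equivalent listing with java.nio.file, under the assumption that the helper walks the configured local directory and returns regular files:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class LocalFileListerSketch {
    // Assumed behavior of FileTool.getFileList(localdir).
    static List<Path> listFiles(String localdir) throws IOException {
        try (Stream<Path> walk = Files.walk(Paths.get(localdir))) {
            return walk.filter(Files::isRegularFile).collect(Collectors.toList());
        }
    }

    public static void main(String[] args) throws IOException {
        // Each resulting path would become both the value and label of a ComboboxVO.
        for (Path p : listFiles(".")) {
            System.out.println(p);
        }
    }
}
```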
......@@ -1409,137 +1409,133 @@ public class TCoreEncryptionController {
}
private Map<String, Object> doLoadingAllTable(List<LoadingParameterVO> LoadingParameterList) {
Map<String, Object> resultMap = new HashMap<>();
try {
int taskCount = 0;
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
taskCount += loadingParameterVO.getTableInfo().size();
}
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
String projectId = loadingParameterVO.getProjectId();
String dataSystemId = loadingParameterVO.getDataSystemId();
Connection connection = loadingParameterVO.getConnection();
TCoreDatasystem tCoreDatasystem = loadingParameterVO.gettCoreDatasystem();
String schema = loadingParameterVO.getSchema();
List<TableInfoVO> tableInfo = loadingParameterVO.getTableInfo();
for (TableInfoVO tableInfoVO : tableInfo) {
EncryptionConfigVO vo = new EncryptionConfigVO();
vo.setDatasystem_id(dataSystemId);
vo.setSchema(schema);
vo.setTable_name(tableInfoVO.getTableName());
vo.setProjectid(projectId);
List<ColumnInfoVO> allColumnList = this.encryptionConfigService.queryOriginalList(vo);
List<ColumnPrimaryKeyInfo> columnPrimaryKeyInfoList = this.columnPrimaryKeyInfoDao.queryAll(new Criterion[]{Restrictions.eq("project_id", loadingParameterVO.getProjectId()), Restrictions.eq("datasystem_id", loadingParameterVO.getDataSystemId()), Restrictions.eq("tschema", loadingParameterVO.getSchema()), Restrictions.eq("tname", tableInfoVO.getTableName())});
List<ColumnInfoVO> allColumns = this.maksingAppDataRuleService.queryColumnInfo(dataSystemId, schema, tableInfoVO.getTableName());
if ("MONGODB".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit(new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
} else if ("ES".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit(new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false, "0", (CountDownLatch)null));
} else if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit((Runnable)new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
continue;
}
if ("ES".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit((Runnable)new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false, "0", null));
continue;
}
if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
TableRuleInfo hiveStoreInfo = this.tableRuleService.getHiveStoreInfo(projectId, dataSystemId, schema, tableInfoVO.getTableName());
if (hiveStoreInfo != null && hiveStoreInfo.getType() == HiveEncDecType.webHdfs) {
this.taskExecutor.submit(new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
} else {
this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
this.taskExecutor.submit((Runnable)new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
continue;
}
} else if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit(new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
} else {
this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
continue;
}
if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit((Runnable)new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
continue;
}
this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
}
} catch (Exception e) {
}
}
catch (Exception e) {
e.printStackTrace();
} finally {
Iterator var20 = LoadingParameterList.iterator();
while(true) {
if (!var20.hasNext()) {
;
} else {
LoadingParameterVO loadingParameterVO = (LoadingParameterVO)var20.next();
if (null != loadingParameterVO.getConnection()) {
}
finally {
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
if (null == loadingParameterVO.getConnection()) continue;
try {
loadingParameterVO.getConnection().close();
} catch (SQLException e) {
e.printStackTrace();
}
}
catch (SQLException e) {
e.printStackTrace();
}
}
}
return resultMap;
}
private Map<String, Object> doReloadingAllTable(List<LoadingParameterVO> LoadingParameterList) {
Map<String, Object> resultMap = new HashMap<>();
try {
int taskCount = 0;
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
taskCount += loadingParameterVO.getTableInfo().size();
}
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
String projectId = loadingParameterVO.getProjectId();
String dataSystemId = loadingParameterVO.getDataSystemId();
Connection connection = loadingParameterVO.getConnection();
TCoreDatasystem tCoreDatasystem = loadingParameterVO.gettCoreDatasystem();
String schema = loadingParameterVO.getSchema();
List<TableInfoVO> tableInfo = loadingParameterVO.getTableInfo();
for (TableInfoVO tableInfoVO : tableInfo) {
EncryptionConfigVO vo = new EncryptionConfigVO();
vo.setDatasystem_id(dataSystemId);
vo.setSchema(schema);
vo.setTable_name(tableInfoVO.getTableName());
vo.setProjectid(projectId);
List<ColumnInfoVO> allColumnList = this.encryptionConfigService.queryOriginalList(vo);
List<ColumnPrimaryKeyInfo> columnPrimaryKeyInfoList = this.columnPrimaryKeyInfoDao.queryAll(new Criterion[]{Restrictions.eq("project_id", loadingParameterVO.getProjectId()), Restrictions.eq("datasystem_id", loadingParameterVO.getDataSystemId()), Restrictions.eq("tschema", loadingParameterVO.getSchema()), Restrictions.eq("tname", tableInfoVO.getTableName())});
List<ColumnInfoVO> allColumns = this.maksingAppDataRuleService.queryColumnInfo(dataSystemId, schema, tableInfoVO.getTableName());
if ("MONGODB".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit(new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
} else if ("ES".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit(new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true, "0", (CountDownLatch)null));
} else if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit((Runnable)new MongoDBLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
continue;
}
if ("ES".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit((Runnable)new ElasticSearchTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true, "0", null));
continue;
}
if ("HIVE_TDH".equals(tCoreDatasystem.getDbtype())) {
TableRuleInfo hiveStoreInfo = this.tableRuleService.getHiveStoreInfo(projectId, dataSystemId, schema, tableInfoVO.getTableName());
if (hiveStoreInfo != null && hiveStoreInfo.getType() == HiveEncDecType.webHdfs) {
this.taskExecutor.submit(new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
} else {
this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
this.taskExecutor.submit((Runnable)new HdfsLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
continue;
}
} else if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit(new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
} else {
this.taskExecutor.submit(new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
continue;
}
if ("HIVE".equals(tCoreDatasystem.getDbtype())) {
this.taskExecutor.submit((Runnable)new HiveLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, false));
continue;
}
this.taskExecutor.submit((Runnable)new NewLoadingTask(projectId, dataSystemId, tCoreDatasystem, schema, tableInfoVO.getTableName(), tableInfoVO.getPrimaryKeys(), tableInfoVO.getColumnInfo(), allColumns, columnPrimaryKeyInfoList, allColumnList, true));
}
} catch (Exception e) {
}
}
catch (Exception e) {
e.printStackTrace();
} finally {
Iterator var20 = LoadingParameterList.iterator();
while(true) {
if (!var20.hasNext()) {
;
} else {
LoadingParameterVO loadingParameterVO = (LoadingParameterVO)var20.next();
if (null != loadingParameterVO.getConnection()) {
}
finally {
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
if (null == loadingParameterVO.getConnection()) continue;
try {
loadingParameterVO.getConnection().close();
} catch (SQLException e) {
e.printStackTrace();
}
}
catch (SQLException e) {
e.printStackTrace();
}
}
}
return resultMap;
}
private Map<String, Object> doLoadingBatchTable(List<LoadingParameterVO> LoadingParameterList) {
......@@ -2512,25 +2508,18 @@ public class TCoreEncryptionController {
} catch (Exception e) {
e.printStackTrace();
} finally {
for (LoadingParameterVO loadingParameterVO : LoadingParameterList) {
if (null == loadingParameterVO.getConnection()) continue;
try {
loadingParameterVO.getConnection().close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
return resultMap;
}
private boolean isTableInSchema(Connection conn, String schema, String table) throws SQLException {
......
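All three loading methods end with the same finally loop that null-checks and closes each JDBC connection. A sketch of that cleanup expressed with try-with-resources, assuming the H2 driver already referenced in this project's configuration is on the classpath:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class ConnectionCleanupSketch {
    public static void main(String[] args) {
        // try-with-resources closes the connection on every exit path,
        // which is what the manual finally { connection.close(); } loop does.
        try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            System.out.println("autoCommit = " + connection.getAutoCommit());
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
```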
......@@ -1086,7 +1086,7 @@ public class TdataProjectServiceImpl implements TdataProjectService {
this.tCoreProjectFindruleDao.save(obj);
}
this.tCoreDataProjectDao.save(project);
this.tCoreDataProjectDao.update(project);
}
public String queryEdtionIdByProId(String projectid) {
......
spring:
servlet:
multipart:
max-file-size:
104857600
max-request-size:
104857600
datasource:
type: com.alibaba.druid.pool.DruidDataSource
druid:
# mysql username/password
username: root
password: '!QAZ2wsx#EDC2022'
# kingbase username/password
# username: system
# password: system
# DM (Dameng) username/password
# username: SYSDBA
# password: 123123123
#H2SQL
# username: sa
# password: sa
# oracle driver
#driverClassName: oracle.jdbc.driver.OracleDriver
#url: jdbc:oracle:thin:@localhost:1521:orcl
# mysql driver
driver-class-name: com.mysql.cj.jdbc.Driver
# kingbase driver
# driver-class-name: com.kingbase8.Driver
# DM driver
# driver-class-name: dm.jdbc.driver.DmDriver
# H2 driver
# driver-class-name: org.h2.Driver
#serverTimezone=UTC sets the time zone
# DM database url
# url: jdbc:dm://192.168.2.228:5236?SCHEMA=encryption&columnNameUpperCase=false&genKeyNameCase=2&clobAsString=1&blobAsString=1
# H2 database
# url: jdbc:h2:tcp://localhost/d:/db/enc;AUTO_SERVER=TRUE
# url: jdbc:h2:C:\\Users\\15613\\Desktop\\fsdownload\\data\\db
# url: jdbc:h2:./db
# kingbase database url
# url: jdbc:kingbase8://192.168.2.127:54321/encryption_gateway?currentSchema=encryption
# mysql database url
url: jdbc:mysql://demo.docmis.cn:23500/nse_test?useSSL=false&useUnicode=true&characterEncoding=UTF-8&serverTimezone=GMT%2B8&rewriteBatchedStatements=true
#production environment
# url: jdbc:mysql://127.0.0.1:3306/encryption?useSSL=false&useUnicode=true&characterEncoding=UTF-8&serverTimezone=GMT%2B8&rewriteBatchedStatements=true
#minimum number of idle connections
minIdle: 5
#maximum number of active connections
maxActive: 100
#works around the MySQL 8-hour connection timeout
validationQuery: SELECT 'X'
#minimum idle time before a connection can be evicted
minEvictableIdleTimeMillis: 30000
#interval between idle-connection eviction checks
timeBetweenEvictionRunsMillis: 60000
maxWait: 10000
login:
password:
username:
#ORM mapping settings; hibernate is configured through jpa
jpa:
hibernate:
#whether tables are created/updated automatically
ddl-auto: update
properties:
hibernate:
#whether to print the SQL generated from HQL
show_sql: false
#whether to format SQL statements
format_sql: false
#session/transaction context managed by spring
current_session_context_class: org.springframework.orm.hibernate5.SpringSessionContext
cache:
use_second_level_cache: false
use_query_cache: false
jackson:
date-format: yyyy-MM-dd HH:mm:ss
time-zone: GMT+8
serialization:
write-dates-as-timestamps: false
# pool initial, minimum, and maximum size
#alert settings - mail notification; spring boot requires these properties at startup, so they are left empty here and the real values are read from the database
#SMTP server address
mail:
host:
#username
username:
#password
password:
mvc:
static-path-pattern: /**
axios:
defaults:
withCredentials: true
h2:
console:
enabled: true
settings:
trace: true
# web-allow-others: false
# path: /console
server:
port: 9005
tomcat:
max-swallow-size: 10485760
# ssl:
# key-store: classpath:tomcat.keystore
# key-store-password: syghca
# key-store-type: jks
# key-alias: tomcat
# enabled-protocols: TLSV1.2
# ciphers: ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-CHACHA20-POLY1305,ECDHE-RSA-CHACHA20-POLY1305,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256
incencryption:
thread:
pool:
core-size: 10
max-size: 10
encryption:
#Base64 random key / Koal KMS / Taihe KMS / HSM SDK / C*Core (Guoxin) encryption card
#Local/KMS/THKMS/SDK/CCORESDF
secret-key-mode: Local
thkms:
protocol: http
host: 10.160.30.130
port: 10006
appId: APP_85B7665094BE4996A71A517E39C1D2B5
keyId: 0303b22f135644ec86023bf62cfe813f
#httpclient_config
httpclient:
connectionRequestTimeout: 30000
connectTimeout: 300000
socketTimeout: 600000
mybatis-plus:
datasource: second-datasource
# if mapper XMLs are under src/main/java: classpath:/com/mistra/axela/*/mapper/*Mapper.xml
# if under the resource directory: classpath:/mapper/*Mapper.xml
mapper-locations: classpath:/mapper/*.xml
# mapper-locations: classpath:/com/performancemonitor/framework/mybatis/xml/*.xml
global-config:
#primary key type 0: database auto-increment, 1: user-supplied ID, 2: globally unique numeric ID, 3: globally unique UUID
id-type: 2
#field strategy 0: ignore, 1: not-NULL check, 2: not-empty check
field-strategy: 2
#camel-case to underscore conversion
db-column-underline: true
#hot-reload mappers, a debugging convenience
#refresh-mapper: true
#uppercase-with-underscore conversion
#capital-mode: true
# sequence key-generator implementation
key-generator: com.baomidou.mybatisplus.incrementer.OracleKeyGenerator
#logical-delete settings (the three entries below)
logic-delete-value: 1
logic-not-delete-value: 0
db-config:
schema: ${base.schema}
# for dm "\"%s\""
# column-format: "\"%s\""
column-format: "\"%s\""
sql-injector: com.baomidou.mybatisplus.mapper.LogicSqlInjector
configuration:
map-underscore-to-camel-case: true
auto-mapping-behavior: FULL
cache-enabled: false
#JdbcTypeForNull setting
jdbc-type-for-null: 'null'
log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
#http protocol for the classification/grading service
protocol:
prex: http
classify:
#whether classification/grading labels are enabled
classifyEnable: false
#classification/grading service IP
classifyIp: 127.0.0.1
#classification/grading service port
classifyPort: 8182
url: ${protocol.prex}://${classify.classifyIp}:${classify.classifyPort}
#classification endpoint
classifyName: /core/datascope/getscopetable
#grading endpoint
levelName: /core/sensitivelevel/getscopetable
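The encryption.thkms block above is plain key/value configuration; a sketch of binding it with Spring Boot's @ConfigurationProperties (the binding class is hypothetical, not part of this commit):

```java
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

// Hypothetical binding class for the encryption.thkms block above.
@Component
@ConfigurationProperties(prefix = "encryption.thkms")
public class ThkmsProperties {
    private String protocol; // http
    private String host;     // 10.160.30.130
    private int port;        // 10006
    private String appId;
    private String keyId;

    public String baseUrl() {
        return protocol + "://" + host + ":" + port;
    }

    // getters/setters required by the binder
    public String getProtocol() { return protocol; }
    public void setProtocol(String protocol) { this.protocol = protocol; }
    public String getHost() { return host; }
    public void setHost(String host) { this.host = host; }
    public int getPort() { return port; }
    public void setPort(int port) { this.port = port; }
    public String getAppId() { return appId; }
    public void setAppId(String appId) { this.appId = appId; }
    public String getKeyId() { return keyId; }
    public void setKeyId(String keyId) { this.keyId = keyId; }
}
```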
......@@ -2,11 +2,11 @@
#IP settings
#jdbc.host=192.168.2.127
#production environment
jdbc.host=127.0.0.1
jdbc.host=demo.docmis.cn
#jdbc.host=192.168.2.228
#port settings
#jdbc.port=54321
jdbc.port=3306
jdbc.port=23500
#database instance
jdbc.kingbase.dbname=security
#jdbc.dbname=encryption_gateway
......@@ -14,7 +14,8 @@ jdbc.kingbase.dbname=security
#if the configured database type is H2, jdbc.dbname must be public
jdbc.dbname=public
#jdbc.dbname=encryption
basePath=/home/trustz
#basePath=/home/trustz
basePath=D:/trustz
maskingPath=/home/masking
#jdbc-mysql
#jdbc.driver=com.mysql.cj.jdbc.Driver
......@@ -167,7 +168,7 @@ loginfilepath = D:/login.properties
app_alert_interface = http://127.0.0.1:8180/core/applog/sendAlertInfo
db_alert_interface = http://127.0.0.1:8180/core/dblog/sendAlertInfo
# mail recipient
mailTo = shuyi666@aliyun.com
mailTo = dhdhhd@aliyun.com
# alerts end
# audit log begin
......
[Data persistence configuration]
#IP settings
#jdbc.host=192.168.2.127
#production environment
jdbc.host=demo.docmis.cn
#jdbc.host=192.168.2.228
#port settings
#jdbc.port=54321
jdbc.port=23500
#database instance
jdbc.kingbase.dbname=security
#jdbc.dbname=encryption_gateway
#jdbc.dbname=encryption_king
#if the configured database type is H2, jdbc.dbname must be public
jdbc.dbname=public
#jdbc.dbname=encryption
#basePath=/home/trustz
basePath=/home/nse-server/trustz
maskingPath=/home/masking
#jdbc-mysql
#jdbc.driver=com.mysql.jdbc.Driver
jdbc.driver= com.mysql.cj.jdbc.Driver
#jdbc.driver=dm.jdbc.driver.DmDriver
#jdbc.driver=org.h2.Driver
#jdbc-kingbase8
#jdbc.driver: com.kingbase8.Driver
#production environment
#url-kingbase8
#jdbc.url=jdbc:kingbase8://${jdbc.host}:${jdbc.port}/${jdbc.kingbase.dbname}?currentSchema=${jdbc.dbname}
jdbc.url=jdbc:mysql://${jdbc.host}:${jdbc.port}/${jdbc.dbname}?useSSL=false&useUnicode=true&characterEncoding=UTF-8&serverTimezone=GMT%2B8
#jdbc.url=jdbc:dm://${jdbc.host}:${jdbc.port}?SCHEMA=${jdbc.dbname}&columnNameUpperCase=false&genKeyNameCase=2
#H2 database
#jdbc.url=jdbc:h2:d:/db/enc;AUTO_SERVER=TRUE
#jdbc.url=jdbc:h2:/home/trustz/data/db/enc;AUTO_SERVER=TRUE
#jdbc.user=sa
#jdbc.password=sa
#jdbc.user=SYSDBA
#jdbc.password=SYSDBA
jdbc.user=root
jdbc.password=!QAZ2wsx#EDC2022
#jdbc.user=system
#jdbc.password=system
jdbc.dialect=org.hibernate.dialect.MySQL5Dialect
#jdbc.dialect=org.hibernate.dialect.DmDialect
#jdbc.dialect=org.hibernate.dialect.H2Dialect
[Global parameters]
#license storage path
licensedir=${basePath}/data/infa_file/lic
#licensedir=D:/lic
#whether a licenseKey is required; 0: no validation, 1: validation required
isNeedLicenseKey=0
#allowed number of failed logins
login_limit_num=5
#deployment path for platform jar packages
deployPath=${basePath}/data
#system version
sys_version=v2.9
[Log properties]
#log file path
logpath = ./loginfo
#logpath = ${basePath}/data/loginfo
#discovery process log path
tasklogger = ${maskingPath}/app/tasklogger/
#discovery result export path
discoveryResultDir=${basePath}/data/infa_project/export/
#discovery task version paths
#TaskVersion=D:\\version\\TaskVersion\\
#ProjectVersion=D:\\version\\ProjectVersion\\
#AllVersion=D:\\version\\AllVersion\\
TaskVersion=${basePath}/data/infa_project/TaskVersion/
ProjectVersion=${basePath}/data/infa_project/ProjectVersion/
AllVersion=${basePath}/data/infa_project/AllVersion/
CustomVersion=${basePath}/data/infa_project/CustomVersion/
#backup SQL path used for rollback
ResetResult=${basePath}/data/infa_project/ResetResult/
#discovery task monitoring path
#monitorFile=D:\\work\\ghca 2.0\\file\\
monitorFile=${basePath}/data/infa_project/monitorFile/
#discovery tasks: whether the result page shows the match rate (0 - hide; 1 - show)
isRate=1
#discovery tasks: number of match-rate entries shown on the result page
rateNum=10
#discovery tasks: where matched and unmatched files are stored
#rateDir=D:\\work\\ghca 2.0\\rate\\
rateDir=${basePath}/data/infa_project/rate/
#discovery tasks: correlation algorithm confidence is shown when above 60%
confidenceRate=0.6
#discovery tasks: local download path for unstructured file/image discovery (FTP/SFTP)
pictureDir=${basePath}/data/infa_project/pic/
#discovery tasks: rows fetched per query
readCntByOneTime=10000
#discovery tasks: rows per JDBC batch insert
jdbc_insert_batche_size=10000
#threshold for treating values as duplicate data
isrepeatvalue=0.1
#excelModelDown=D:\\masking\\
#data domain - discovery rule import template
#dataAreaExportTemplate=E:/
dataAreaExportTemplate=${basePath}/data/infa_file/dataAreaExportTemplate/
[OTHERS]
#name of the database to export
#jdbc.exportDatabaseName=encryption_test
jdbc.exportDatabaseName=${jdbc.dbname}
#path of the SQL dump to import
jdbc.importPath=${basePath}/data/importMysqlSql/backup.sql
#path to the mysql bin directory
#MysqlPath=C:\\Program Files (x86)\\MySQL\\MySQL Server 5.5\\bin\\
MysqlPath=/usr/bin/
#MysqlPath=C:\\Program Files (x86)\\Mysql\\mysql-8.0.23-winx64\\bin\\
#where the exported database dump is stored
jdbc.exportPath=${basePath}/data/exportMysqlSql/backup.sql
#jdbc.exportPath=D:/ghca/data/exportMysqlSql/backup.sql
# target database for import
jdbc.importDatabaseName=${jdbc.dbname}
#jdbcproxypath = E:\\IdeaProjects\\p6-encryption-pom\\target\\encryption-proxy-jar-with-dependencies.jar
#jdbcproxypath = C:\\work\\repo\\com\\ghca\\encryption-proxy\\1.0\\encryption-proxy-1.0-jar-with-dependencies.jar
jdbcproxypath = ${basePath}/data/encryption/encryption-proxy.jar
# udf directory
udfproxypath = ${basePath}/data/udf/
# sqlserver udf file location
sqlserverudfpath = ${basePath}/data/udf/sqlserver-udf.dll
odbcproxypath = ${basePath}/data/encryption/masking_odbc.zip
jdbcmaskingfilename = encryption.properties
desensitizationStrategyfilename = masking.properties
#storage path for database connection files
databaselink = ${basePath}/data
#exceptionSqlPath = D:/exceptionSql/
exceptionSqlPath = ${basePath}/data/exceptionSql/
# polling self-check download file path
#exceptionKmLogPath = D:/kmlog/
#polling self-check download file location
exceptionKmLogPath = ${basePath}/data/kmlog/
logDetail = logDetail.log
#HSM SDKKeyId index, used only for internal SM2 encryption; not needed for SM4
#SDKKeyId 110 is the key index; SDKPassword "pass" is the private-key authorization code
SDKKeyId = 110
#HSM password, used only for internal SM2 encryption; not needed for SM4
SDKPassword = pass
SDKPath = ${basePath}/data/HsmConfig.properties
#SDKPath = E:/HsmConfig/HsmConfig.properties
copySuffix = _copy
initialSuffix = _initial
cipherSuffix = _cipher
digestSuffix = _enc
checkNum = 100
corePoolSize = 4
maxPoolSize = 4
#maximum thread-pool queue size
queueCapacity = 4
#maximum batch size for OceanBase commits
oceanbasecount = 3
encbatchcount = 10000
#ES wait time
esWaitTime = 2000
loginfilepath = D:/login.properties
# alerts begin
app_alert_interface = http://127.0.0.1:8180/core/applog/sendAlertInfo
db_alert_interface = http://127.0.0.1:8180/core/dblog/sendAlertInfo
# mail recipient
mailTo = abcld@aliyun.com
# alerts end
# audit log begin
# app audit log path / app rule-match log path
#appserverlogpath = E:/app/
appserverlogpath = ${maskingPath}/app/appdata/
#audit log to syslog
#syslog4auditserverlogpath = E:/s4a
syslog4auditserverlogpath = ${maskingPath}/s4a/s4adata/
# db access log path
#dbaccesslogpath = E:/syslog/access/
dbaccesslogpath = ${maskingPath}/app/syslog/access/
# db rule-match log path
#dbrulelogpath = E:/syslog/rule/
dbrulelogpath = ${maskingPath}/app/syslog/rule/
# statement types
auditlog_type = SELECT,UPDATE,DELETE
# app audit service address
applogServerHost = 127.0.0.1
applogServerPort = 32376
# db audit service address
dblogServerHost = 127.0.0.1
dblogServerPort = 40000
#audit log to syslog
s4aServerHost = 127.0.0.1
s4aServerPost = 40000
# audit log end
# data source character encodings
oraclecharset = AL32UTF8,ZHS16GBK
mysqlcharset = UTF8,GBK
postgresqlcharset = UTF8,GBK
hivecharset = UTF8,BINARY
mssqlservercharset = UTF8,GBK
db2charset = Unicode,UCS-2,UTF16,UTF8
mariadbcharset = UTF8,BINARY
informixcharset = UTF8,819
dmcharset = GB18030,UTF8,EUC-KR
sybasecharset = UTF8,GBK
sybaseiqcharset = UTF8,GBK
gbasecharset = UTF8,GBK
kingbasecharset = UTF8,GBK
oscarcharset = UTF8,GBK
uxdbcharset = UTF8,GBK
greenplumcharset = UTF8,GBK
teradatacharset = UTF8,GBK
highgocharset = UTF8,GBK
cachecharset = UTF8,GBK
kakfacharset = UTF8,GBK
KAFKAcharset = UTF8,GBK
hadoopcharset = UTF8,GBK
hbasecharset = UTF8,GBK
gausscharset = UTF8,GBK
hive_tdhcharset = UTF8,GBK
mongodbcharset = UTF8,GBK
escharset = UTF8,GBK
oceanbasecharset = UTF8,GBK
gaussdbcharset = UTF8,GBK
#whether LIKE queries encrypt per character: 1 encrypts each character, 0 encrypts the whole string; off (0) by default
encryption.like.open = 1
#separator used to join individually encrypted characters
encryption.like.split = #
#encryption label (prefix)
encryption.label = jm_
#encryption label suffix
encryption.label.suffix = _jm
#ES encryption label
encryption.es.label = jmjzrjjm
#configuration database type for all config-store operations: 0 mysql (default), 1 KingbaseES, 2 DM, 3 H2
peizhikudbtype = 0
#whether operation logging is enabled
isSaveServerClientLog=false
#encryption card key settings: the x and y components can be fetched each time via the export-public-key interface; M is the hash of the key plaintext, C is the key ciphertext, L is the vector length (16 bytes)
ECCCipher.bits=0L
ECCCipher.x=0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -83, 42, -40, -13, -22, -108, 70, 42, 123, 40, 88, -37, 76, 90, 105, 55, 46, -59, -3, -43, 71, -120, 104, -84, 42, -77, 94, -112, 45, -93, 58, -45
ECCCipher.y=0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -65, 18, -33, -28, -99, 39, -126, 89, 37, -15, -82, -46, -25, 40, 2, -44, 71, 102, 30, 113, -63, -78, -89, -55, -52, 38, 61, -109, 4, -122, 38, -30
ECCCipher.M=0, 27, 28, 35, 90, -3, -128, 76, -117, -31, 83, 84, 46, 61, -97, -92, 120, -19, 46, -121, -27, -107, -58, 90, 118, 0, 6, 105, 68, -41, 96, 30
ECCCipher.C=-18, 92, 111, -108, 103, -78, 28, 59, 67, -5, -101, -38, 40, 115, -72, 71, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
ECCCipher.L=16
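The ECCCipher values above are Java signed bytes serialized as comma-separated text. A sketch of parsing one back into a byte[]; the class name is illustrative:

```java
import java.util.Arrays;

public class EccCipherParser {
    // Parse a comma-separated list of signed bytes such as ECCCipher.M above.
    static byte[] parseBytes(String csv) {
        String[] parts = csv.split(",");
        byte[] out = new byte[parts.length];
        for (int i = 0; i < parts.length; i++) {
            out[i] = Byte.parseByte(parts[i].trim());
        }
        return out;
    }

    public static void main(String[] args) {
        byte[] m = parseBytes("0, 27, 28, 35, 90, -3, -128, 76");
        System.out.println(Arrays.toString(m) + " (" + m.length + " bytes)");
    }
}
```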
#password for obtaining the encryption card handle
Ccore.PhKeyHandle.password=11111111
#interval of the encryption verification scheduled task
digestperiod=5
#encryption verification key
digestkey=ghca
#maximum column length when encryption/decryption generates the copy column
encryption.field.max=10000
#mongodb primary key
encryption.mongodbpk=_id
#schema used when building SQL for DM
dmschema=encryption
#import/export format: 0 excel, 1 encrypted txt
is_enc_backupfile=1
#key for the encrypted txt
is_enc_backupfile_key=6C603060274AED6FFB93EDB40E23E173
#0: pull encryption config for the current project only; 1: pull config for all projects on the platform
is_pull_all=0
#path for archived old logs
oldlogpath=${basePath}/data/systemlog
#system console log path
commandpath=${basePath}/data/commandlog
#json encryption label
jsonkey=jsonkey
#Sansec JCE interface configuration file
swsdsPath=${basePath}/jdk_dev
#whether historical data must be encrypted (0: no, 1: yes)
is_encrypted=1
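If this file is consumed with java.util.Properties semantics, load() reads it as ISO-8859-1 and decodes any \uXXXX escapes, which is why non-ASCII comments in such files are usually escaped; the ${basePath} placeholders would be resolved by the application itself. A minimal loading sketch, with the file name assumed:

```java
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class ConfigLoaderSketch {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream("encryption.properties")) { // assumed name
            props.load(in); // ISO-8859-1 with \uXXXX escape decoding
        }
        // Placeholder resolution such as ${basePath} is up to the application.
        System.out.println("basePath = " + props.getProperty("basePath"));
        System.out.println("sys_version = " + props.getProperty("sys_version"));
    }
}
```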