diff --git a/assembly/bin/executeSQL.sh b/assembly/bin/executeSQL.sh
index 8b60d0f08c..eb4da70655 100644
--- a/assembly/bin/executeSQL.sh
+++ b/assembly/bin/executeSQL.sh
@@ -69,6 +69,10 @@ function executeSQL() {
isSuccess "source dss_dataapi_ddl.sql"
mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source $DB_CONF_PATH/apps/dss_guide_ddl.sql"
isSuccess "source dss_guide_ddl.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source $DB_CONF_PATH/apps/datamodel.sql"
+ isSuccess "source datamodel.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source $DB_CONF_PATH/apps/datawarehouse.sql"
+ isSuccess "source datawarehouse.sql"
mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source $DB_DML_PATH"
isSuccess "source dss_dml_real.sql"
echo "Rebuild the table"
diff --git a/assembly/bin/install.sh b/assembly/bin/install.sh
index d3b02f36cd..a6e3761760 100644
--- a/assembly/bin/install.sh
+++ b/assembly/bin/install.sh
@@ -121,11 +121,18 @@ function replaceCommonIp() {
DSS_DATA_API_SERVER_PORT=9208
fi
- if [ -z "$DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP" ]; then
- DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP=$LOCAL_IP
+ if [ -z "$DSS_DATA_ASSETS_SERVER_INSTALL_IP" ]; then
+ DSS_DATA_ASSETS_SERVER_INSTALL_IP=$LOCAL_IP
fi
- if [ -z "$DSS_DATA_GOVERNANCE_SERVER_PORT" ]; then
- DSS_DATA_GOVERNANCE_SERVER_PORT=9209
+ if [ -z "$DSS_DATA_ASSETS_SERVER_PORT" ]; then
+ DSS_DATA_ASSETS_SERVER_PORT=9300
+ fi
+ if [ -z "$DSS_DATAMODEL_CENTER_SERVER_PORT" ]; then
+ DSS_DATAMODEL_CENTER_SERVER_PORT=9400
+ fi
+
+ if [ -z "$DSS_DATA_WAREHOUSE_SERVER_PORT" ]; then
+ DSS_DATA_WAREHOUSE_SERVER_PORT=9500
fi
if [ -z "$DSS_GUIDE_SERVER_INSTALL_IP" ]; then
@@ -343,13 +350,38 @@ function installDssProject() {
CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
installPackage
- ###install dss-data-governance-server
- SERVER_NAME=dss-data-governance-server
- SERVER_IP=$DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP
- SERVER_PORT=$DSS_DATA_GOVERNANCE_SERVER_PORT
- UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-data-governance/$SERVER_NAME
- LIB_PATH=$SERVER_HOME/lib/dss-data-governance
- LOG_PATH=$SERVER_HOME/logs/dss-data-governance/$SERVER_NAME
+ ###install dss-data-assets-server
+ SERVER_NAME=dss-data-assets-server
+ SERVER_IP=$DSS_DATA_ASSETS_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_DATA_ASSETS_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-apps/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-data-assets-server
+ # NOTE(review): removed duplicate LIB_PATH assignment that clobbered the lib dir above; atlas-application.properties needs its own copy step/variable
+ LOG_PATH=$SERVER_HOME/logs/dss-data-assets-server/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ installPackage
+
+ ###install dss-data-warehouse-server
+ SERVER_NAME=dss-data-warehouse-server
+ SERVER_IP=$DSS_DATA_WAREHOUSE_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_DATA_WAREHOUSE_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-apps/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-data-warehouse-server
+ LOG_PATH=$SERVER_HOME/logs/dss-data-warehouse-server/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ installPackage
+
+ ###install dss-datamodel-center-server
+ SERVER_NAME=dss-datamodel-center-server
+ SERVER_IP=$DSS_DATAMODEL_CENTER_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_DATAMODEL_CENTER_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-apps/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-datamodel-center-server
+ LOG_PATH=$SERVER_HOME/logs/dss-datamodel-center-server/$SERVER_NAME
CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
diff --git a/assembly/config/config.sh b/assembly/config/config.sh
index d36b2acb9a..4ff0737a7f 100644
--- a/assembly/config/config.sh
+++ b/assembly/config/config.sh
@@ -52,13 +52,19 @@ DSS_FLOW_EXECUTION_SERVER_PORT=9006
###dss-scriptis-server
DSS_SCRIPTIS_SERVER_INSTALL_IP=127.0.0.1
DSS_SCRIPTIS_SERVER_PORT=9008
+###dss-data-assets-server
+DSS_DATA_ASSETS_SERVER_INSTALL_IP=127.0.0.1
+DSS_DATA_ASSETS_SERVER_PORT=9300
+###dss-datamodel-center-server
+DSS_DATAMODEL_CENTER_SERVER_INSTALL_IP=127.0.0.1
+DSS_DATAMODEL_CENTER_SERVER_PORT=9400
+###dss-data-warehouse-server
+DSS_DATA_WAREHOUSE_SERVER_INSTALL_IP=127.0.0.1
+DSS_DATA_WAREHOUSE_SERVER_PORT=9500
###dss-data-api-server
DSS_DATA_API_SERVER_INSTALL_IP=127.0.0.1
DSS_DATA_API_SERVER_PORT=9208
-###dss-data-governance-server
-DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP=127.0.0.1
-DSS_DATA_GOVERNANCE_SERVER_PORT=9209
###dss-guide-server
DSS_GUIDE_SERVER_INSTALL_IP=127.0.0.1
DSS_GUIDE_SERVER_PORT=9210
@@ -83,5 +89,4 @@ EMAIL_PORT=25
EMAIL_USERNAME=xxx@163.com
EMAIL_PASSWORD=xxxxx
EMAIL_PROTOCOL=smtp
-############## ############## dss_appconn_instance configuration end ############## ##############
-
+############## ############## dss_appconn_instance configuration end ############## ##############
\ No newline at end of file
diff --git a/assembly/dss-package/pom.xml b/assembly/dss-package/pom.xml
index 1f66fc60b4..c919e12025 100644
--- a/assembly/dss-package/pom.xml
+++ b/assembly/dss-package/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
dss-package
diff --git a/assembly/dss-package/src/main/assembly/distribution.xml b/assembly/dss-package/src/main/assembly/distribution.xml
index 09fce1e059..8ca7ac5237 100644
--- a/assembly/dss-package/src/main/assembly/distribution.xml
+++ b/assembly/dss-package/src/main/assembly/distribution.xml
@@ -287,20 +287,40 @@
- ${basedir}/../../dss-data-api/dss-data-api-server/target/out/dss-data-api-server/lib/
+ ${basedir}/../../dss-apps/dss-data-api/dss-data-api-server/target/out/dss-data-api-server/lib/
- lib/dss-data-api/dss-data-api-server
+ lib/dss-apps/dss-data-api-server
+
+ **/*
+
+
+
+
+
+
+ ${basedir}/../../dss-apps/dss-dataasset-management/dss-data-assets-server/target/out/dss-data-assets-server/lib/
+
+ lib/dss-apps/dss-data-assets-server
+
+ **/*
+
+
+
+
+
+ ${basedir}/../../dss-apps/dss-datamodel-center/dss-datamodel-center-server/target/out/dss-datamodel-center-server/lib/
+
+ lib/dss-apps/dss-datamodel-center-server
**/*
-
- ${basedir}/../../dss-data-governance/dss-data-governance-server/target/out/dss-data-governance-server/lib/
+ ${basedir}/../../dss-apps/dss-datawarehouse-design/dss-data-warehouse-server/target/out/dss-data-warehouse-server/lib/
- lib/dss-data-governance/dss-data-governance-server
+ lib/dss-apps/dss-data-warehouse-server
**/*
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 35c0daa9dd..fcd3cb222c 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -22,7 +22,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
pom
4.0.0
diff --git a/conf/atlas-application.properties b/conf/atlas-application.properties
index e69de29bb2..e37bc792d9 100644
--- a/conf/atlas-application.properties
+++ b/conf/atlas-application.properties
@@ -0,0 +1,67 @@
+# Generated by Apache Ambari. Sun Feb 27 10:44:18 2022
+
+atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS
+atlas.audit.hbase.zookeeper.quorum=hdp
+atlas.audit.zookeeper.session.timeout.ms=60000
+atlas.authentication.keytab=/etc/security/keytabs/atlas.service.keytab
+atlas.authentication.method.file=true
+atlas.authentication.method.file.filename=/home/hadoop/software/apache-atlas-2.0.0/conf/users-credentials.properties
+atlas.authentication.method.kerberos=false
+atlas.authentication.method.ldap=false
+atlas.authentication.method.ldap.ad.base.dn=
+atlas.authentication.method.ldap.ad.bind.dn=
+atlas.authentication.method.ldap.ad.bind.password=
+atlas.authentication.method.ldap.ad.default.role=ROLE_USER
+atlas.authentication.method.ldap.ad.domain=
+atlas.authentication.method.ldap.ad.referral=ignore
+atlas.authentication.method.ldap.ad.url=
+atlas.authentication.method.ldap.ad.user.searchfilter=(sAMAccountName={0})
+atlas.authentication.method.ldap.base.dn=
+atlas.authentication.method.ldap.bind.dn=
+atlas.authentication.method.ldap.bind.password=
+atlas.authentication.method.ldap.default.role=ROLE_USER
+atlas.authentication.method.ldap.groupRoleAttribute=cn
+atlas.authentication.method.ldap.groupSearchBase=
+atlas.authentication.method.ldap.groupSearchFilter=
+atlas.authentication.method.ldap.referral=ignore
+atlas.authentication.method.ldap.type=ldap
+atlas.authentication.method.ldap.url=
+atlas.authentication.method.ldap.user.searchfilter=
+atlas.authentication.method.ldap.userDNpattern=uid=
+atlas.authentication.principal=atlas
+atlas.authorizer.impl=ranger
+atlas.cluster.name=bigdata_all_in_one
+atlas.enableTLS=false
+atlas.graph.index.search.solr.mode=cloud
+atlas.graph.index.search.solr.wait-searcher=true
+atlas.graph.index.search.solr.zookeeper-url=hadoop:2181/infra-solr
+atlas.graph.storage.hbase.table=atlas_janus
+atlas.graph.storage.hostname=hdp
+atlas.kafka.auto.commit.enable=false
+atlas.kafka.bootstrap.servers=hadoop:6667
+atlas.kafka.hook.group.id=atlas
+atlas.kafka.zookeeper.connect=hadoop:2181
+atlas.kafka.zookeeper.connection.timeout.ms=30000
+atlas.kafka.zookeeper.session.timeout.ms=60000
+atlas.kafka.zookeeper.sync.time.ms=20
+atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
+atlas.lineage.schema.query.Table=Table where __guid='%s'\, columns
+atlas.notification.create.topics=true
+atlas.notification.embedded=false
+atlas.notification.replicas=1
+atlas.notification.topics=ATLAS_HOOK,ATLAS_ENTITIES
+atlas.proxyusers=
+atlas.rest.address=http://hadoop:21000
+atlas.server.address.id1=hadoop:21000
+atlas.server.bind.address=0.0.0.0
+atlas.server.ha.enabled=false
+atlas.server.http.port=21000
+atlas.server.https.port=21443
+atlas.server.ids=id1
+atlas.simple.authz.policy.file=/home/hadoop/software/apache-atlas-2.0.0/conf/atlas-simple-authz-policy.json
+atlas.solr.kerberos.enable=false
+atlas.ssl.exclude.protocols=TLSv1.2
+atlas.sso.knox.browser.useragent=
+atlas.sso.knox.enabled=false
+atlas.sso.knox.providerurl=
+atlas.sso.knox.publicKey=
diff --git a/conf/dss-data-assets-server.properties b/conf/dss-data-assets-server.properties
new file mode 100644
index 0000000000..82eb1232b3
--- /dev/null
+++ b/conf/dss-data-assets-server.properties
@@ -0,0 +1,88 @@
+#
+# /*
+# * Copyright 2019 WeBank
+# *
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+#
+# Spring configurations
+spring.server.port=9300
+spring.spring.application.name=dss-data-assets-server
+wds.linkis.test.mode=true
+wds.linkis.log.clear=true
+wds.linkis.server.version=v1
+
+
+wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/hive?useSSL=false&characterEncoding=UTF-8
+
+wds.linkis.server.mybatis.datasource.username=root
+
+wds.linkis.server.mybatis.datasource.password=root
+
+
+
+
+
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.data.governance.restful
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/data/governance/dao/impl/*.xml
+
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.data.governance.entity
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.data.governance.dao
+
+
+# atlas config
+atlas.rest.address=http://localhost:21000
+atlas.username=admin
+atlas.password=admin123
+atlas.client.readTimeoutMSecs=60000
+atlas.client.connectTimeoutMSecs=60000
+
+# create root classification
+atlas.root.indicator=datamodel_indicator
+atlas.root.measure=datamodel_measure
+atlas.root.dimension=datamodel_dimension
+atlas.root.layer=datamodel_layer
+atlas.root.theme=datamodel_theme
+
+# create root glossary
+atlas.root.label=datamodel_label
+atlas.root.collection=datamodel_collection
+
+
+# hive metadata config
+metastore.datasource.driver=com.mysql.jdbc.Driver
+metastore.datasource.url=jdbc:mysql://localhost:3306/hive?characterEncoding=UTF-8
+metastore.datasource.username=root
+metastore.datasource.password=root
+
+
+wds.workspace.client.serverurl=http://localhost:9001
+
+wds.workspace.client.authtoken.key=hadoop
+
+wds.workspace.client.authenticationStrategy=org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy
+
+wds.workspace.client.discovery.enabled=false
+
+wds.workspace.client.authtoken.value=hadoop
+
+spring.spring.mvc.servlet.path=/api/rest_j/v1
+
+wds.workspace.client.dws.version=v1
+
+wds.wedatasphere.data.assert.client.strategy=static
+
+
diff --git a/conf/dss-data-governance-server.properties b/conf/dss-data-governance-server.properties
deleted file mode 100644
index b3980c6ccc..0000000000
--- a/conf/dss-data-governance-server.properties
+++ /dev/null
@@ -1,53 +0,0 @@
-#
-# /*
-# * Copyright 2019 WeBank
-# *
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-#
-
-# Spring configurations
-spring.server.port=9209
-spring.spring.application.name=dss-data-governance-server
-
-wds.linkis.log.clear=true
-
-wds.linkis.server.version=v1
-
-##restful
-wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.data.asset.restful,com.webank.wedatasphere.dss.data.classification.restful
-
-##mybatis
-wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/data/asset/dao/impl/*.xml
-wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.data.asset.entity
-wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.data.asset.dao,com.webank.wedatasphere.dss.data.warehouse.dao,com.webank.wedatasphere.dss.data.warehouse.mapper
-
-#wds.linkis.gateway.ip=127.0.0.1
-#wds.linkis.gateway.port=9001
-#wds.linkis.gateway.url=http://127.0.0.1:9001/
-
-
-# atlas config
-atlas.rest.address=http://xxxxxxx:21000
-atlas.username=xxxxxxxx
-atlas.password=yyyyyyyyy
-atlas.client.readTimeoutMSecs=60000
-atlas.client.connectTimeoutMSecs=60000
-
-atlas.cluster.name=primary
-
-# hive metadata config
-metastore.datasource.driver=com.mysql.jdbc.Driver
-metastore.datasource.url=jdbc:mysql://xxxxxx:yyyy/metastore?characterEncoding=UTF-8
-metastore.datasource.username=xxxxxx
-metastore.datasource.password=yyyyyy
\ No newline at end of file
diff --git a/conf/dss-data-warehouse-server.properties b/conf/dss-data-warehouse-server.properties
new file mode 100644
index 0000000000..e70544cb9d
--- /dev/null
+++ b/conf/dss-data-warehouse-server.properties
@@ -0,0 +1,51 @@
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+spring.server.port=9500
+spring.spring.application.name=dss-data-warehouse-server
+wds.linkis.test.mode=true
+
+wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/governance?useSSL=false&characterEncoding=UTF-8
+
+wds.linkis.server.mybatis.datasource.username=root
+
+wds.linkis.server.mybatis.datasource.password=root
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+
+## datasource client
+wds.datawarehouse.datasource.client.serverurl=http://hdp:8085
+wds.datawarehouse.datasource.client.authtoken.key=hadoop
+wds.datawarehouse.datasource.client.authtoken.value=hadoop
+wds.datawarehouse.datasource.client.dws.version=v1
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.warehouse.restful
+
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/warehouse/dao/mapper/impl/*.xml
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.warehouse.dao.mapper
+
+wds.entity.workspace.name.auto.transform=false
+
+spring.spring.mvc.servlet.path=/api/rest_j/v1
+
+
+
+
diff --git a/conf/dss-datamodel-center-server.properties b/conf/dss-datamodel-center-server.properties
new file mode 100644
index 0000000000..10c37b1c8c
--- /dev/null
+++ b/conf/dss-datamodel-center-server.properties
@@ -0,0 +1,107 @@
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+spring.server.port=9400
+spring.spring.application.name=dss-datamodel-center-server
+
+wds.linkis.test.mode=true
+
+
+
+wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/datamodel?useSSL=false&characterEncoding=UTF-8
+
+wds.linkis.server.mybatis.datasource.username=root
+
+wds.linkis.server.mybatis.datasource.password=root
+
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+
+wds.wedatasphere.linkis.serverurl=http://localhost:8085
+
+wds.wedatasphere.linkis.authtoken.key=hadoop
+
+wds.wedatasphere.linkis.discovery.enabled=false
+
+wds.wedatasphere.linkis.authenticationStrategy=org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy
+
+wds.wedatasphere.linkis.authtoken.value=hadoop
+
+wds.wedatasphere.linkis.dws.version=v1
+
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.datamodel.indicator.restful,\
+ com.webank.wedatasphere.dss.datamodel.table.restful,\
+ com.webank.wedatasphere.dss.datamodel.dimension.restful,\
+ com.webank.wedatasphere.dss.datamodel.measure.restful
+
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/datamodel/dimension/dao/impl/*.xml,\
+ classpath*:com/webank/wedatasphere/dss/datamodel/table/dao/impl/*.xml,\
+ classpath*:com/webank/wedatasphere/dss/datamodel/measure/dao/impl/*.xml,\
+ classpath*:com/webank/wedatasphere/dss/datamodel/indicator/dao/impl/*.xml
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.datamodel.dimension.dao,\
+ com.webank.wedatasphere.dss.datamodel.table.dao,\
+ com.webank.wedatasphere.dss.datamodel.measure.dao,\
+ com.webank.wedatasphere.dss.datamodel.indicator.dao
+
+wds.wedatasphere.warehouse.client.serverurl=http://localhost:9001
+
+wds.wedatasphere.warehouse.client.authtoken.key=hadoop
+
+wds.wedatasphere.warehouse.client.authenticationStrategy=org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy
+
+wds.wedatasphere.warehouse.client.authtoken.value=hadoop
+
+wds.wedatasphere.warehouse.client.discovery.enabled=false
+
+wds.wedatasphere.warehouse.client.dws.version=v1
+
+
+wds.wedatasphere.assets.client.serverurl=http://localhost:9001
+
+wds.wedatasphere.assets.client.authtoken.key=hadoop
+
+wds.wedatasphere.assets.client.authenticationStrategy=org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy
+
+wds.wedatasphere.assets.client.discovery.enabled=false
+
+wds.wedatasphere.assets.client.authtoken.value=hadoop
+
+wds.wedatasphere.assets.client.dws.version=v1
+
+
+
+wds.workspace.client.serverurl=http://localhost:9001
+
+wds.workspace.client.authtoken.key=hadoop
+
+wds.workspace.client.authenticationStrategy=org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy
+
+wds.workspace.client.discovery.enabled=false
+
+wds.workspace.client.authtoken.value=hadoop
+
+wds.workspace.client.dws.version=v1
+spring.spring.mvc.servlet.path=/api/rest_j/v1
+
+
+
+wds.wedatasphere.data.model.client.strategy=static
diff --git a/db/apps/datamodel.sql b/db/apps/datamodel.sql
new file mode 100644
index 0000000000..ae5b1ad9df
--- /dev/null
+++ b/db/apps/datamodel.sql
@@ -0,0 +1,352 @@
+
+SET NAMES utf8mb4;
+SET FOREIGN_KEY_CHECKS = 0;
+
+
+-- ----------------------------
+-- Table structure for dss_datamodel_dimension
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_dimension`;
+CREATE TABLE `dss_datamodel_dimension` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `field_identifier` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `formula` varchar(255) COLLATE utf8_bin NOT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `warehouse_theme_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题格式为: theme_domain_name.theme_name',
+ `owner` varchar(64) COLLATE utf8_bin NOT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `is_available` tinyint(1) NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `warehouse_theme_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '英文',
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `name` (`name`) USING BTREE,
+ UNIQUE KEY `field_identifier` (`field_identifier`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=27 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_indicator
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_indicator`;
+CREATE TABLE `dss_datamodel_indicator` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `field_identifier` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `warehouse_theme_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题格式为: theme_domain_name.theme_name',
+ `owner` varchar(64) COLLATE utf8_bin NOT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `is_available` tinyint(1) NOT NULL,
+ `is_core_indicator` tinyint(1) NOT NULL,
+ `theme_area` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '空:代表所有,如果是逗号分隔的字符串则代表对应的theme的names',
+ `layer_area` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '空:代表所有,如果是逗号分隔的字符串则代表对应的layer的names',
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `version` varchar(255) COLLATE utf8_bin NOT NULL,
+ `warehouse_theme_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题英文',
+ `theme_area_en` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '英文名称',
+ `layer_area_en` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '英文名称',
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `name` (`name`) USING BTREE,
+ KEY `field_identifier` (`field_identifier`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=82 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_indicator_content
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_indicator_content`;
+CREATE TABLE `dss_datamodel_indicator_content` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `indicator_id` int(11) NOT NULL,
+ `version` varchar(255) COLLATE utf8_bin NOT NULL,
+ `indicator_type` int(4) NOT NULL COMMENT '0 原子 1 衍生 2 派生 3 复杂 4 自定义',
+ `measure_id` int(11) DEFAULT NULL,
+ `indicator_source_info` text COLLATE utf8_bin NOT NULL COMMENT '指标来源信息',
+ `formula` varchar(255) COLLATE utf8_bin NOT NULL,
+ `business` varchar(255) COLLATE utf8_bin NOT NULL,
+ `business_owner` varchar(255) COLLATE utf8_bin NOT NULL,
+ `calculation` varchar(255) COLLATE utf8_bin NOT NULL,
+ `calculation_owner` varchar(255) COLLATE utf8_bin NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=158 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_indicator_version
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_indicator_version`;
+CREATE TABLE `dss_datamodel_indicator_version` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `owner` varchar(64) COLLATE utf8_bin NOT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `version` varchar(255) COLLATE utf8_bin NOT NULL,
+ `version_context` text COLLATE utf8_bin NOT NULL COMMENT '历史版本详细信息快照',
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `name` (`name`,`version`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=50 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_label
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_label`;
+CREATE TABLE `dss_datamodel_label` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `field_identifier` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `params` text COLLATE utf8_bin COMMENT '标签键值对 json',
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `warehouse_theme_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题格式为: theme_domain_name.theme_name',
+ `owner` varchar(64) COLLATE utf8_bin NOT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `is_available` tinyint(1) NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `warehouse_theme_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '英文',
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `name` (`name`) USING BTREE,
+ UNIQUE KEY `field_identifier` (`field_identifier`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=34 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_measure
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_measure`;
+CREATE TABLE `dss_datamodel_measure` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `field_identifier` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `formula` varchar(255) COLLATE utf8_bin NOT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `warehouse_theme_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题格式为: theme_domain_name.theme_name',
+ `owner` varchar(64) COLLATE utf8_bin NOT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `is_available` tinyint(1) NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `warehouse_theme_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '英文',
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `name` (`name`) USING BTREE,
+ UNIQUE KEY `field_identifier` (`field_identifier`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table`;
+CREATE TABLE `dss_datamodel_table` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `data_base` varchar(255) COLLATE utf8_bin NOT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `alias` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `creator` varchar(255) COLLATE utf8_bin NOT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `warehouse_layer_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓层级',
+ `warehouse_layer_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓层级英文',
+ `warehouse_theme_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题格式为: theme_domain_name.theme_name',
+ `warehouse_theme_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题英文',
+ `lifecycle` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '生命周期',
+ `is_partition_table` tinyint(1) NOT NULL,
+ `is_available` tinyint(1) NOT NULL,
+ `storage_type` varchar(255) COLLATE utf8_bin NOT NULL COMMENT '存储类型:hive/mysql',
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `compress` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '压缩格式',
+ `file_type` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '文件格式',
+ `version` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '版本信息:默认1',
+ `is_external` tinyint(1) NOT NULL COMMENT '是否外部表 0 内部表 1外部表',
+ `location` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '外部表时 location',
+ `label` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '标签',
+ `lifecycle_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '生命周期英文',
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `database` (`data_base`,`name`) USING BTREE,
+ UNIQUE KEY `name` (`name`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=92 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_collcetion
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_collcetion`;
+CREATE TABLE `dss_datamodel_table_collcetion` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `data_base` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `alias` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `warehouse_layer_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓层级',
+ `warehouse_theme_name` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题格式为: theme_domain_name.theme_name',
+ `lifecycle` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '生命周期',
+ `is_partition_table` tinyint(1) DEFAULT NULL,
+ `is_available` tinyint(1) DEFAULT NULL,
+ `storage_type` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '存储类型:hive/mysql',
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `compress` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '压缩格式',
+ `file_type` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '文件格式',
+ `user` varchar(255) COLLATE utf8_bin NOT NULL COMMENT '收藏人',
+ `version` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '版本信息:默认1',
+ `is_external` tinyint(1) DEFAULT NULL COMMENT '是否外部表 0 内部表 1外部表',
+ `location` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '外部表时 location',
+ `label` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '标签',
+ `guid` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT 'atlas标识',
+ `warehouse_layer_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓层级英文',
+ `warehouse_theme_name_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '数仓主题英文',
+ `lifecycle_en` varchar(512) COLLATE utf8_bin DEFAULT NULL COMMENT '生命周期英文',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=26 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_columns
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_columns`;
+CREATE TABLE `dss_datamodel_table_columns` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `table_id` int(11) NOT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `alias` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `type` varchar(64) COLLATE utf8_bin NOT NULL,
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `is_partition_field` tinyint(1) NOT NULL,
+ `is_primary` tinyint(1) NOT NULL,
+ `length` int(11) DEFAULT NULL,
+ `rule` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+ `model_type` int(4) DEFAULT NULL COMMENT '0 维度,1 指标 2 度量',
+ `model_name_en` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '模型信息英文名称',
+ `model_id` int(11) DEFAULT NULL COMMENT '关联具体模型id信息(因为有版本数据表id不可靠,暂时不用)',
+ `model_name` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '模型信息名称',
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=226 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_materialized_history
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_materialized_history`;
+CREATE TABLE `dss_datamodel_table_materialized_history` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `materialized_code` mediumtext COLLATE utf8_bin COMMENT '物化sql',
+ `reason` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '物化原因',
+ `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '物化者',
+  `status` int(4) NOT NULL COMMENT 'succeed,failed,in progress',
+ `create_time` datetime NOT NULL,
+ `last_update_time` datetime NOT NULL,
+ `task_id` varchar(512) COLLATE utf8_bin NOT NULL,
+ `error_msg` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `tableName` varchar(255) COLLATE utf8_bin NOT NULL COMMENT '表名',
+ `data_base` varchar(255) COLLATE utf8_bin NOT NULL,
+ `version` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '版本信息:默认1',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=66 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_params
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_params`;
+CREATE TABLE `dss_datamodel_table_params` (
+ `tbl_id` int(11) NOT NULL,
+ `param_key` varchar(256) COLLATE utf8_bin NOT NULL COMMENT '创建表参数:压缩/orc等',
+ `param_value` mediumtext COLLATE utf8_bin,
+ PRIMARY KEY (`tbl_id`,`param_key`) USING BTREE,
+ KEY `table_params_n49` (`tbl_id`) USING BTREE
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_statics
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_statics`;
+CREATE TABLE `dss_datamodel_table_statics` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `origin_tables` varchar(4000) COLLATE utf8_bin DEFAULT NULL,
+ `table_id` int(11) NOT NULL,
+ `access_count` int(11) NOT NULL,
+ `last_access_time` int(11) NOT NULL,
+ `sample_data_path` varchar(256) COLLATE utf8_bin DEFAULT NULL COMMENT '存储10行用例数据',
+ `sample_update_time` int(11) NOT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_stats
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_stats`;
+CREATE TABLE `dss_datamodel_table_stats` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `data_base` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
+ `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `column_count` int(11) NOT NULL COMMENT '字段数',
+ `storage_size` int(11) NOT NULL COMMENT '存储大小',
+ `file_count` int(11) NOT NULL COMMENT '文件数',
+ `partition_count` int(11) NOT NULL COMMENT '分区数',
+ `access_count` int(11) NOT NULL COMMENT '访问次数',
+ `collect_count` int(11) NOT NULL COMMENT '收藏次数',
+ `ref_count` int(11) NOT NULL COMMENT '引用次数',
+ `version` varchar(128) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL COMMENT '版本信息:默认1',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_datamodel_table_version
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_datamodel_table_version`;
+CREATE TABLE `dss_datamodel_table_version` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `tbl_id` int(11) NOT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `is_materialized` tinyint(1) NOT NULL COMMENT '是否物化',
+ `table_code` mediumtext COLLATE utf8_bin COMMENT '创建table的sql',
+ `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '版本注释',
+ `version` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '版本信息:默认version0002',
+ `table_params` mediumtext COLLATE utf8_bin,
+ `columns` varchar(4000) COLLATE utf8_bin DEFAULT NULL,
+ `source_type` varchar(128) COLLATE utf8_bin DEFAULT 'add' COMMENT 'rollback,update,add',
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY `table_version` (`version`,`name`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=40 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+DROP TABLE IF EXISTS `dss_datamodel_dictionary`;
+CREATE TABLE `dss_datamodel_dictionary` (
+ `id` int(50) NOT NULL COMMENT '主键id',
+ `code` varchar(128) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '类型code',
+ `type` varchar(128) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '字典类型',
+ `description` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述',
+ `created_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间',
+ `update_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '更新时间',
+ `sort` int(11) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '数模字典' ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of dss_datamodel_dictionary
+-- ----------------------------
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000001, 'Snappy', 'COMPRESS', 'Snappy', '2021-10-09 15:41:23', '2021-10-09 15:55:44', 1);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000002, 'None', 'COMPRESS', '无', '2021-10-09 15:42:17', '2021-10-20 14:48:59', 2);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000003, 'ORC', 'FILE_STORAGE', 'orc', '2021-10-09 15:55:32', '2021-10-20 14:49:00', 1);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000004, 'Parquet', 'FILE_STORAGE', 'Parquet', '2021-10-09 15:55:32', '2021-10-20 14:49:02', 2);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000005, 'None', 'FILE_STORAGE', '无', '2021-10-09 15:55:32', '2021-10-20 14:49:04', 3);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000006, 'Once', 'LIFECYCLE', '一次', '2021-10-09 15:55:32', '2021-10-20 14:49:05', 1);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000007, 'OneDay', 'LIFECYCLE', '一天', '2021-10-09 15:55:32', '2021-10-20 14:49:06', 2);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000008, 'HalfMonth', 'LIFECYCLE', '半月', '2021-10-09 15:55:32', '2021-10-20 14:49:07', 3);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000009, 'OneMonth', 'LIFECYCLE', '一个月', '2021-10-09 15:55:32', '2021-10-20 14:49:08', 4);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000010, 'ThreeMonths', 'LIFECYCLE', '三个月', '2021-10-09 15:55:32', '2021-10-20 14:49:09', 5);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000011, 'SixMonths', 'LIFECYCLE', '六个月', '2021-10-09 15:55:32', '2021-10-20 14:49:10', 6);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000012, 'OneYear', 'LIFECYCLE', '一年', '2021-10-09 15:55:32', '2021-10-20 14:49:11', 7);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000013, 'ThreeYears', 'LIFECYCLE', '三年', '2021-10-09 15:55:32', '2021-10-20 14:49:12', 8);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000014, 'Hive', 'STORAGE_ENGINE', 'hive', '2021-10-09 15:55:32', '2021-10-20 14:49:19', 1);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000015, 'Mysql', 'STORAGE_ENGINE', 'mysql', '2021-10-09 15:55:32', '2021-10-20 14:49:20', 2);
+INSERT INTO `dss_datamodel_dictionary` VALUES (10000016, 'ES', 'STORAGE_ENGINE', 'es', '2021-10-09 15:55:32', '2021-10-20 14:49:25', 3);
+
+SET FOREIGN_KEY_CHECKS = 1;
diff --git a/db/apps/datawarehouse.sql b/db/apps/datawarehouse.sql
new file mode 100644
index 0000000000..6b5430ffc6
--- /dev/null
+++ b/db/apps/datawarehouse.sql
@@ -0,0 +1,227 @@
+
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+SET NAMES utf8mb4;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+
+# 转储表 dss_datawarehouse_layer
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_layer`;
+
+CREATE TABLE `dss_datawarehouse_layer` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `en_name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `owner` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `is_available` bit(1) NOT NULL,
+ `preset` bit(1) NOT NULL DEFAULT b'0',
+ `sort` int(4) NOT NULL DEFAULT '1',
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `dbs` varchar(255) COLLATE utf8_bin NOT NULL COMMENT '如果为空代表所有的库',
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `status` bit(1) NOT NULL DEFAULT b'1',
+ `lock_version` bigint(20) NOT NULL DEFAULT '1',
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+LOCK TABLES `dss_datawarehouse_layer` WRITE;
+/*!40000 ALTER TABLE `dss_datawarehouse_layer` DISABLE KEYS */;
+
+INSERT INTO `dss_datawarehouse_layer` (`id`, `name`, `en_name`, `owner`, `principal_name`, `is_available`, `preset`, `sort`, `description`, `dbs`, `create_time`, `update_time`, `status`, `lock_version`)
+VALUES
+ (1,'原数据层(ODS)','ods','admin','所有角色',b'1',b'1',10,'由业务系统同步到数据仓库的原始数据,一般不经过加工','ALL','2021-09-01 00:00:00','2021-09-01 00:00:00',b'1',1),
+ (2,'明细层(DWD)','dwd','admin','所有角色',b'1',b'1',20,'从ods层经过ETL得到的明细数据,表示具体的事实','ALL','2021-09-01 00:00:00','2021-09-01 00:00:00',b'1',1),
+ (3,'汇总层(DWS)','dws','admin','所有角色',b'1',b'1',30,'由明细数据经过汇总得到的数据,主要由统计维度和指标构成','ALL','2021-09-01 00:00:00','2021-09-01 00:00:00',b'1',1);
+
+/*!40000 ALTER TABLE `dss_datawarehouse_layer` ENABLE KEYS */;
+UNLOCK TABLES;
+
+
+# 转储表 dss_datawarehouse_layer_generalize_rule
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_layer_generalize_rule`;
+
+CREATE TABLE `dss_datawarehouse_layer_generalize_rule` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `layer_id` bigint(20) NOT NULL,
+ `regex` varchar(255) COLLATE utf8_bin NOT NULL COMMENT '自动归纳表达式',
+ `identifier` varchar(255) COLLATE utf8_bin NOT NULL,
+ `en_identifier` varchar(255) COLLATE utf8_bin NOT NULL,
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `status` bit(1) NOT NULL DEFAULT b'1',
+ `lock_version` bigint(20) NOT NULL DEFAULT '1',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+
+# 转储表 dss_datawarehouse_modifier
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_modifier`;
+
+CREATE TABLE `dss_datawarehouse_modifier` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `modifier_type` varchar(64) COLLATE utf8_bin NOT NULL,
+ `modifier_type_en` varchar(64) COLLATE utf8_bin DEFAULT NULL,
+ `theme_domain_id` bigint(20) DEFAULT NULL,
+ `layer_id` bigint(20) DEFAULT NULL,
+ `theme_area` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '空:代表所有,如果是逗号分隔的字符串则代表对应的theme的names',
+ `theme_area_en` varchar(1000) COLLATE utf8_bin DEFAULT NULL,
+ `layer_area` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '空:代表所有,如果是逗号分隔的字符串则代表对应的layer的names',
+ `layer_area_en` varchar(1000) COLLATE utf8_bin DEFAULT NULL,
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `is_available` bit(1) NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `status` bit(1) NOT NULL DEFAULT b'1',
+ `lock_version` bigint(20) NOT NULL DEFAULT '1',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+
+# 转储表 dss_datawarehouse_modifier_list
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_modifier_list`;
+
+CREATE TABLE `dss_datawarehouse_modifier_list` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `modifier_id` bigint(20) NOT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `identifier` varchar(255) COLLATE utf8_bin NOT NULL,
+ `formula` varchar(255) COLLATE utf8_bin NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+
+# 转储表 dss_datawarehouse_statistical_period
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_statistical_period`;
+
+CREATE TABLE `dss_datawarehouse_statistical_period` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `theme_domain_id` bigint(20) NOT NULL,
+ `layer_id` bigint(20) NOT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `en_name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `start_time_formula` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `end_time_formula` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `owner` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `is_available` bit(1) NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `status` bit(1) NOT NULL DEFAULT b'1',
+ `lock_version` bigint(20) NOT NULL DEFAULT '1',
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+
+# 转储表 dss_datawarehouse_table_rule
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_table_rule`;
+
+CREATE TABLE `dss_datawarehouse_table_rule` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` bigint(20) NOT NULL,
+ `theme_area` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '空:代表所有,如果是逗号分隔的字符串则代表对应的theme的names',
+ `layer_area` varchar(1000) COLLATE utf8_bin NOT NULL COMMENT '空:代表所有,如果是逗号分隔的字符串则代表对应的layer的names',
+ `table_name_rule` varchar(1000) COLLATE utf8_bin NOT NULL,
+ `table_props_rule` varchar(1000) COLLATE utf8_bin NOT NULL,
+ `partation_rule` varchar(1000) COLLATE utf8_bin NOT NULL,
+ `column_rule` varchar(1000) COLLATE utf8_bin NOT NULL,
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `is_available` bit(1) NOT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+
+# 转储表 dss_datawarehouse_theme
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_theme`;
+
+CREATE TABLE `dss_datawarehouse_theme` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `theme_domain_id` bigint(20) NOT NULL,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `en_name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `parent_theme_name` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT '默认为空,如果不为空则指向父主题',
+ `owner` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `is_available` bit(1) NOT NULL,
+ `sort` int(4) NOT NULL,
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+
+# 转储表 dss_datawarehouse_theme_domain
+# ------------------------------------------------------------
+
+DROP TABLE IF EXISTS `dss_datawarehouse_theme_domain`;
+
+CREATE TABLE `dss_datawarehouse_theme_domain` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `en_name` varchar(255) COLLATE utf8_bin NOT NULL,
+ `owner` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `principal_name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT '授权的名字:userName、roleName',
+ `is_available` bit(1) NOT NULL,
+ `sort` int(4) NOT NULL,
+ `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+ `create_time` datetime NOT NULL,
+ `update_time` datetime NOT NULL,
+ `status` bit(1) NOT NULL DEFAULT b'1',
+ `lock_version` bigint(20) NOT NULL DEFAULT '1',
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+LOCK TABLES `dss_datawarehouse_theme_domain` WRITE;
+/*!40000 ALTER TABLE `dss_datawarehouse_theme_domain` DISABLE KEYS */;
+
+INSERT INTO `dss_datawarehouse_theme_domain` (`id`, `name`, `en_name`, `owner`, `principal_name`, `is_available`, `sort`, `description`, `create_time`, `update_time`, `status`, `lock_version`)
+VALUES
+ (1,'主题','英文名','负责人','New York',b'1',1,'描述','2021-09-28 13:18:48','2021-09-28 13:18:48',b'1',1);
+
+/*!40000 ALTER TABLE `dss_datawarehouse_theme_domain` ENABLE KEYS */;
+UNLOCK TABLES;
+
+
+
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
diff --git a/db/dss_ddl.sql b/db/dss_ddl.sql
index 938961d097..6a22e25abf 100644
--- a/db/dss_ddl.sql
+++ b/db/dss_ddl.sql
@@ -25,11 +25,7 @@ CREATE TABLE `dss_appconn_instance` (
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='dss instance的实例表';
-/*
----------------------------------------------------------------------
-------------------- DSS Orchestrator Framework ---------------------
----------------------------------------------------------------------
-*/
+
DROP TABLE IF EXISTS `dss_orchestrator_info`;
CREATE TABLE `dss_orchestrator_info` (
@@ -82,11 +78,7 @@ CREATE TABLE `dss_orchestrator_ref_orchestration_relation` (
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=326 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT;
-/*
----------------------------------------------------------------------
-------------------- DSS Project Framework ---------------------
----------------------------------------------------------------------
-*/
+
DROP TABLE IF EXISTS `dss_project`;
CREATE TABLE `dss_project` (
@@ -141,11 +133,6 @@ CREATE TABLE `dss_appconn_project_relation` (
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=90 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT;
-/*
----------------------------------------------------------------------
---------------------- DSS Workspace Framework ----------------------
----------------------------------------------------------------------
-*/
DROP TABLE IF EXISTS `dss_workspace`;
CREATE TABLE `dss_workspace` (
@@ -308,11 +295,7 @@ CREATE TABLE `dss_workspace_download_audit` (
PRIMARY KEY (`id`)
) ENGINE = INNODB DEFAULT CHARSET = utf8 COMMENT = '文件下载审计';
-/*
----------------------------------------------------------------------
---------------------------- DSS Workflow ---------------------------
----------------------------------------------------------------------
-*/
+
DROP TABLE IF EXISTS `dss_workflow`;
CREATE TABLE `dss_workflow` (
@@ -589,4 +572,3 @@ CREATE TABLE `dss_proxy_user` (
`remark` varchar(500) DEFAULT NULL COMMENT '备注',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=214 DEFAULT CHARSET=utf8;
-
diff --git a/db/dss_dml.sql b/db/dss_dml.sql
index 7ae770c638..cfbab82387 100644
--- a/db/dss_dml.sql
+++ b/db/dss_dml.sql
@@ -3,16 +3,28 @@ INSERT INTO `dss_appconn` (`id`, `appconn_name`, `is_user_need_init`, `level`, `
VALUES (1,'sso',0,1,0,0,NULL,"com.webank.wedatasphere.dss.appconn.sso.SSOAppConn",NULL,NULL),
(2,'scriptis',0,1,0,0,"sso",NULL,NULL,NULL),
(3,'workflow',0,1,1,0,NULL,'com.webank.wedatasphere.dss.appconn.workflow.WorkflowAppConn','/appcom/Install/dss/dss-appconns/workflow',NULL),
-(4,'apiservice',0,1,0,0,"sso",NULL,NULL,NULL);
+(4,'apiservice',0,1,0,0,'sso',NULL,NULL,NULL),
+(5,'datamodel',0,1,0,0,'sso',NULL,NULL,NULL),
+(6,'warehouse',0,1,0,0,'sso',NULL,NULL,NULL),
+(7,'dataasset',0,1,0,0,'sso',NULL,NULL,NULL),
+(8,'dataservice',0,1,0,0,'sso',NULL,NULL,NULL);
DELETE FROM dss_appconn_instance;
select @scriptis_appconn_id:= id from dss_appconn where appconn_name="scriptis";
select @workflow_appconn_id:= id from dss_appconn where appconn_name="workflow";
select @apiservice_appconn_id:= id from dss_appconn where appconn_name="apiservice";
+select @datamodel_appconn_id:= id from dss_appconn where appconn_name="datamodel";
+select @warehouse_appconn_id:= id from dss_appconn where appconn_name="warehouse";
+select @dataasset_appconn_id:= id from dss_appconn where appconn_name="dataasset";
+select @dataservice_appconn_id:= id from dss_appconn where appconn_name="dataservice";
INSERT INTO `dss_appconn_instance` (`id`, `appconn_id`, `label`, `url`, `enhance_json`, `homepage_uri`)
VALUES (2, @scriptis_appconn_id, 'DEV', '/home', '', ''),
(3, @workflow_appconn_id,'DEV','/workspaceHome','',''),
-(4, @apiservice_appconn_id, 'DEV', '/apiservices', '', '');
+(4, @apiservice_appconn_id, 'DEV', '/apiservices', '', ''),
+(5,@datamodel_appconn_id,'DEV','/dataModelCenter',NULL,'tableManage/tableSearch'),
+(6,@warehouse_appconn_id,'DEV','/dataWarehouseDesign',NULL,'themeDomains'),
+(7,@dataasset_appconn_id,'DEV','/dataAssetManage',NULL,'overview'),
+(8,@dataservice_appconn_id,'DEV','/dataService','','');
DELETE FROM dss_workspace;
insert into `dss_workspace`(`id`, `name`,`label`,`description`,`create_by`,`create_time`,`department`,`product`,`source`,`last_update_time`,`last_update_user`,`workspace_type`)
@@ -42,16 +54,10 @@ insert into `dss_workspace_dictionary`(`workspace_id`, `parent_key`, `dic_name`,
DELETE FROM dss_sidebar;
insert into `dss_sidebar`(`id`,`workspace_id`,`name`,`name_en`,`title`,`title_en`,`type`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (2,0,'菜单','Menu','菜单','Menu',1,1,NULL,'SYSTEM','2020-12-15 13:21:06',NULL,'2021-02-23 09:45:50');
--- insert into `dss_sidebar`(`id`,`workspace_id`,`name`,`name_en`,`title`,`title_en`,`type`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (3,0,'常见问题','Common problem','常见问题','Common problem',1,1,NULL,'SYSTEM','2020-12-15 13:21:06',NULL,'2021-02-23 09:46:18');
DELETE FROM dss_sidebar_content;
insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (3,0,2,NULL,NULL,'工作空间管理','Workspace management','/workspaceManagement/productsettings',0,'menuIcon',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-02-23 09:47:49');
insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (4,0,2,NULL,NULL,'UDF管理','UDF management','dss/linkis/?noHeader=1&noFooter=1#/urm/udfManagement',1,'menuIcon',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-02-23 09:47:11');
--- insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (5,0,3,NULL,NULL,'资源配置说明',NULL,'http://127.0.0.1:8088/kn/d/38',1,'fi-warn',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-01-12 17:16:52');
--- insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (6,0,3,NULL,NULL,'Spark使用指南','[Discussion on error code 22223]','http://127.0.0.1:8088/kn/d/40',1,'fi-warn',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-02-23 09:48:28');
--- insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (7,0,3,NULL,NULL,'Hive语法介绍',NULL,'http://127.0.0.1:8088/kn/d/34',1,'fi-warn',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-01-12 17:17:00');
--- insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (8,0,3,NULL,NULL,'工作流使用介绍',NULL,'http://127.0.0.1:8088/kn/d/42',1,'fi-warn',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-01-12 17:17:01');
--- insert into `dss_sidebar_content`(`id`,`workspace_id`,`sidebar_id`,`name`,`name_en`,`title`,`title_en`,`url`,`url_type`,`icon`,`order_num`,`remark`,`create_user`,`create_time`,`update_user`,`update_time`) values (9,0,3,NULL,NULL,'数据服务使用介绍','Discussion on error code 22223','http://127.0.0.1:8088/kn/d/32',1,'fi-warn',1,NULL,'SYSTEM','2020-12-15 13:21:07',NULL,'2021-02-23 09:48:19');
DELETE FROM dss_workspace_menu;
INSERT INTO `dss_workspace_menu` (`id`, `name`, `title_en`, `title_cn`, `description`, `is_active`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`) VALUES('1','数据交换','data exchange','数据交换','数据交换描述','1',NULL,NULL,NULL,NULL,NULL,NULL);
@@ -61,6 +67,7 @@ INSERT INTO `dss_workspace_menu` (`id`, `name`, `title_en`, `title_cn`, `descrip
INSERT INTO `dss_workspace_menu` (`id`, `name`, `title_en`, `title_cn`, `description`, `is_active`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`) VALUES('5','管理员功能','administrator function','管理员功能','管理员功能描述','0',NULL,NULL,NULL,NULL,NULL,NULL);
insert into `dss_workspace_menu` (`id`, `name`, `title_en`, `title_cn`, `description`, `is_active`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`) values('6','数据应用','data application','数据应用','数据应用描述','1',NULL,NULL,NULL,NULL,NULL,NULL);
insert into `dss_workspace_menu` (`id`, `name`, `title_en`, `title_cn`, `description`, `is_active`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`) values('7','应用开发','application development','应用开发','应用开发描述','1',NULL,NULL,NULL,NULL,NULL,NULL);
+insert into `dss_workspace_menu` (`id`, `name`, `title_en`, `title_cn`, `description`, `is_active`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`) values('8','数据管理','data manage','数据管理','数据管理描述','1',NULL,NULL,NULL,NULL,NULL,NULL);
DELETE FROM dss_workspace_menu_appconn;
INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
@@ -68,8 +75,17 @@ VALUES (@scriptis_appconn_id, 2, 'Scriptis', 'Scriptis', 'Scriptis is a one-stop
INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
VALUES (@workflow_appconn_id, 2, 'workflow', '工作流开发', '工作流开发', '工作流开发', null, null, 1, 'Enter workflow', '进入 工作流开发', null, null, null, null, null, null, null, null, null, null);
INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
-VALUES (@apiservice_appconn_id, 7, 'dataService', '数据服务', '/dataService', '/dataService', null, null, 1, 'Enter dataService', '进入 数据服务', null, null, null, null, null, null, null, null, null, null);
+VALUES (@apiservice_appconn_id, 7, 'SparkdataService', 'spark数据服务', '/dataService', '/dataService', null, null, 1, 'Enter dataService', '进入 spark数据服务', null, null, null, null, null, null, null, null, null, null);
+INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES (@dataservice_appconn_id, 7, 'dataService', '数据服务', '/dataService', '/dataService', null, null, 1, 'Enter dataService', '进入 数据服务', null, null, null, null, null, null, null, null, null, null);
+
+INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES (@datamodel_appconn_id, 8, 'datamodel', '数据模型管理', 'datamodel', '数据模型管理', null, null, 1, 'Enter datamodel', '进入 数据模型管理', null, null, null, null, null, null, null, null, null, null);
+INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES (@warehouse_appconn_id, 8, 'warehouse', '数据仓库管理', 'warehouse', '数据仓库管理', null, null, 1, 'Enter warehouse', '进入 数据仓库管理', null, null, null, null, null, null, null, null, null, null);
+INSERT INTO dss_workspace_menu_appconn (appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES (@dataasset_appconn_id, 8, 'dataasset', '数据总览', 'dataasset', '数据总览', null, null, 1, 'Enter dataasset', '进入 数据总览', null, null, null, null, null, null, null, null, null, null);
DELETE FROM dss_workspace_role;
INSERT INTO `dss_workspace_role` (`id`, `workspace_id`, `name`, `front_name`, `update_time`, `description`) VALUES('1','-1','admin','管理员','2020-07-13 02:43:35','通用角色管理员');
@@ -123,7 +139,6 @@ insert into `dss_workflow_node_to_group`(`node_id`,`group_id`) values (10, @fun
insert into `dss_workflow_node_to_group`(`node_id`,`group_id`) values (12, @function_node_groupId);
DELETE FROM dss_workflow_node_ui;
--- todo msg.topic在receiver和sender使用了重复key
insert into `dss_workflow_node_ui`(`id`,`key`,`description`,`description_en`,`lable_name`,`lable_name_en`,`ui_type`,`required`,`value`,`default_value`,`is_hidden`,`condition`,`is_advanced`,`order`,`node_menu_type`,`is_base_info`,`position`) values (1,'title','请填写节点名称','Please enter node name','节点名','Node name','Input',1,NULL,NULL,0,NULL,0,1,1,1,'node');
insert into `dss_workflow_node_ui`(`id`,`key`,`description`,`description_en`,`lable_name`,`lable_name_en`,`ui_type`,`required`,`value`,`default_value`,`is_hidden`,`condition`,`is_advanced`,`order`,`node_menu_type`,`is_base_info`,`position`) values (3,'desc','请填写节点描述','Please enter the node description','节点描述','Node description','Text',0,NULL,NULL,0,NULL,0,4,1,1,'node');
insert into `dss_workflow_node_ui`(`id`,`key`,`description`,`description_en`,`lable_name`,`lable_name_en`,`ui_type`,`required`,`value`,`default_value`,`is_hidden`,`condition`,`is_advanced`,`order`,`node_menu_type`,`is_base_info`,`position`) values (5,'businessTag',NULL,NULL,'业务标签','businessTag','Tag',0,NULL,NULL,0,NULL,0,2,1,1,'node');
@@ -309,7 +324,7 @@ insert into `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_ra
insert into `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_range`, `error_msg`, `error_msg_en`, `trigger`) values('58','Regex','(.+)@(.+)@(.+)','此格式错误,例如:ProjectName@WFName@jobName','Invalid format,example:ProjectName@WFName@jobName','blur');
INSERT INTO `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_range`, `error_msg`, `error_msg_en`, `trigger`) values('59','OFT','["true","false"]','请填写是否复用引擎,false:不复用,true:复用','Please fill in whether or not to reuse engine, true: reuse, false: not reuse','blur');
insert into `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_range`, `error_msg`, `error_msg_en`, `trigger`) values('60', 'Regex', '^[0-9.]*g{0,1}$', 'Spark内存设置如2g', 'Drive memory size, default value: 2', 'blur');
-insert into `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_range`, `error_msg`, `error_msg_en`, `trigger`) values('61','Regex','^(.|\s){1,500}$','长度在1到5000个字符','The length is between 1 and 5000 characters','blur');
+insert into `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_range`, `error_msg`, `error_msg_en`, `trigger`) values('61','Regex','^.{1,5000}$','长度在1到5000个字符','The length is between 1 and 5000 characters','blur');
insert into `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_range`, `error_msg`, `error_msg_en`, `trigger`) values('62','Regex','^.{1,150}$','长度在1到150个字符','The length is between 1 and 150 characters','blur');
DELETE FROM dss_workflow_node_ui_to_validate;
@@ -408,9 +423,35 @@ INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id
INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@workflow_appconn_id,'6','1',now(),'system');
INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@workflow_appconn_id,'7','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'1','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'2','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'3','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'4','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'5','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'6','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@datamodel_appconn_id,'7','1',now(),'system');
+
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'1','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'2','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'3','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'4','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'5','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'6','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@warehouse_appconn_id,'7','1',now(),'system');
+
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'1','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'2','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'3','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'4','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'5','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'6','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',@dataasset_appconn_id,'7','1',now(),'system');
+
INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('224',@scriptis_appconn_id,'1','1',now(),'system');
INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('224',@workflow_appconn_id,'1','1',now(),'system');
INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('224',@apiservice_appconn_id,'1','1',now(),'system');
-
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('224',@datamodel_appconn_id,'1','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('224',@warehouse_appconn_id,'1','1',now(),'system');
+INSERT INTO `dss_workspace_appconn_role` (`workspace_id`, `appconn_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('224',@dataasset_appconn_id,'1','1',now(),'system');
INSERT INTO `dss_workspace_admin_dept` (`id`, `parent_id`, `ancestors`, `dept_name`, `order_num`, `leader`, `phone`, `email`, `status`, `del_flag`, `create_by`, `create_time`, `update_by`, `update_time`) VALUES('100','0','0','基础科技','0','leader01','1888888888','123@qq.com','0','0','admin',now(),'admin',now());
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/pom.xml b/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
index 53ed9785f2..3a706fb086 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml b/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml
index 29c79358fa..076e0784db 100644
--- a/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml
@@ -6,7 +6,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/dolphinscheduler/conversion/WorkflowToDolphinSchedulerSynchronizer.java b/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/dolphinscheduler/conversion/WorkflowToDolphinSchedulerSynchronizer.java
index 745f0c2363..353bcadd00 100644
--- a/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/dolphinscheduler/conversion/WorkflowToDolphinSchedulerSynchronizer.java
+++ b/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/dolphinscheduler/conversion/WorkflowToDolphinSchedulerSynchronizer.java
@@ -13,6 +13,7 @@
import com.webank.wedatasphere.dss.workflow.core.entity.Workflow;
import com.webank.wedatasphere.dss.workflow.core.entity.WorkflowNode;
import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
import java.util.List;
@@ -47,11 +48,12 @@ public void syncToRel(ConvertedRel convertedRel) {
updateOperation.updateOrchestration(ref);
}
+
private void checkSchedulerProject(Workflow flow) throws ExternalOperationFailedException {
List nodes = flow.getWorkflowNodes();
for (WorkflowNode node : nodes) {
DSSNode dssNode = node.getDSSNode();
- if (CollectionUtils.isEmpty(dssNode.getResources())) {
+ if (CollectionUtils.isEmpty(dssNode.getResources()) && MapUtils.isEmpty(dssNode.getJobContent())) {
throw new ExternalOperationFailedException(90021, dssNode.getName() + "节点内容不能为空");
}
}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml b/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
index 5cd39b200b..64a0a9b276 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/pom.xml b/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
index 1f95a9a6dd..5fef86822a 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-scriptis-appconn/pom.xml b/dss-appconn/appconns/dss-scriptis-appconn/pom.xml
index d4f7feac3e..75828cdaaa 100644
--- a/dss-appconn/appconns/dss-scriptis-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-scriptis-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-sendemail-appconn/pom.xml b/dss-appconn/appconns/dss-sendemail-appconn/pom.xml
index 44db21489b..8d9f125bcc 100644
--- a/dss-appconn/appconns/dss-sendemail-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-sendemail-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml b/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml
index 1633694949..7b784811f3 100644
--- a/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml
+++ b/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-sso-appconn/pom.xml b/dss-appconn/appconns/dss-sso-appconn/pom.xml
index 74e1b099ed..b66c806c18 100644
--- a/dss-appconn/appconns/dss-sso-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-sso-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-workflow-appconn/pom.xml b/dss-appconn/appconns/dss-workflow-appconn/pom.xml
index d9c115b1d5..17dc4f401c 100644
--- a/dss-appconn/appconns/dss-workflow-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-workflow-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-appconn/dss-appconn-core/pom.xml b/dss-appconn/dss-appconn-core/pom.xml
index 365d9b910b..9d91fe1779 100644
--- a/dss-appconn/dss-appconn-core/pom.xml
+++ b/dss-appconn/dss-appconn-core/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/dss-appconn-loader/pom.xml b/dss-appconn/dss-appconn-loader/pom.xml
index 88f859a160..b5c20a167f 100644
--- a/dss-appconn/dss-appconn-loader/pom.xml
+++ b/dss-appconn/dss-appconn-loader/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml
index af145c11ea..40aac210e5 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml b/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml
index 976c3a1b03..64be244aad 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/dss-appconn-manager/pom.xml b/dss-appconn/dss-appconn-manager/pom.xml
index e720464f3c..96e66b69b3 100644
--- a/dss-appconn/dss-appconn-manager/pom.xml
+++ b/dss-appconn/dss-appconn-manager/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/dss-scheduler-appconn/pom.xml b/dss-appconn/dss-scheduler-appconn/pom.xml
index 465c6d46e8..295310559c 100644
--- a/dss-appconn/dss-scheduler-appconn/pom.xml
+++ b/dss-appconn/dss-scheduler-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/linkis-appconn-engineplugin/pom.xml b/dss-appconn/linkis-appconn-engineplugin/pom.xml
index 1696b4e240..ed86fad5a3 100644
--- a/dss-appconn/linkis-appconn-engineplugin/pom.xml
+++ b/dss-appconn/linkis-appconn-engineplugin/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
4.0.0
diff --git a/dss-appconn/pom.xml b/dss-appconn/pom.xml
index 0d6cd24cb2..a89bbcff01 100644
--- a/dss-appconn/pom.xml
+++ b/dss-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../pom.xml
4.0.0
diff --git a/dss-apps/dss-apiservice-server/pom.xml b/dss-apps/dss-apiservice-server/pom.xml
index 33c561f337..dae047bc7a 100644
--- a/dss-apps/dss-apiservice-server/pom.xml
+++ b/dss-apps/dss-apiservice-server/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../pom.xml
4.0.0
diff --git a/dss-apps/dss-data-api/dss-api-sql-template/pom.xml b/dss-apps/dss-data-api/dss-api-sql-template/pom.xml
index 6c9dde2535..1364fdb663 100644
--- a/dss-apps/dss-data-api/dss-api-sql-template/pom.xml
+++ b/dss-apps/dss-data-api/dss-api-sql-template/pom.xml
@@ -5,7 +5,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
diff --git a/dss-apps/dss-data-api/dss-data-api-server/pom.xml b/dss-apps/dss-data-api/dss-data-api-server/pom.xml
index 25f716e927..9037dfa6cf 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/pom.xml
+++ b/dss-apps/dss-data-api/dss-data-api-server/pom.xml
@@ -5,7 +5,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0
+ 1.2.0
../../../pom.xml
4.0.0
@@ -133,7 +133,7 @@
com.webank.wedatasphere.dss
dss-sso-integration-standard
- 1.1.0
+ 1.2.0
provided
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/entity/ApiConfig.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/entity/ApiConfig.java
index 8e4ea5a166..afa8675d18 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/entity/ApiConfig.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/entity/ApiConfig.java
@@ -31,7 +31,7 @@ public class ApiConfig {
private String method;
@TableField("`describe`")
private String describe;
- @NotBlank(message = "datasource_id不能为空")
+ @NotNull(message = "datasource_id不能为空")
@TableField(value = "datasource_id")
private Integer datasourceId;
private String tblName;
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiAuthRestful.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiAuthRestful.java
index eb1e5ca16c..125c75abb8 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiAuthRestful.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiAuthRestful.java
@@ -88,7 +88,7 @@ public Message getApiAuthList(HttpServletRequest httpServletRequest, @RequestPar
}
Workspace workspace = SSOHelper.getWorkspace(httpServletRequest);
List totals = new ArrayList<>();
- List apiAuths = apiAuthService.getApiAuthList(Long.valueOf(workspace.getWorkspaceName()),
+ List apiAuths = apiAuthService.getApiAuthList(workspaceId,
caller, totals, pageNow, pageSize);
return Message.ok().data("list", apiAuths).data("total", totals.get(0));
}
@@ -108,7 +108,7 @@ public Message getApiGroup(HttpServletRequest httpServletRequest, @RequestParam(
Workspace workspace = SSOHelper.getWorkspace(httpServletRequest);
log.info("workspace is {}", workspace.getWorkspaceName());
- List apiGroupInfoList = apiAuthService.getApiGroupList(Long.valueOf(workspace.getWorkspaceName()));
+ List apiGroupInfoList = apiAuthService.getApiGroupList(workspaceId);
Message message = Message.ok().data("list", apiGroupInfoList);
return message;
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiConfigRestful.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiConfigRestful.java
index e144cbee29..6ed7457a2b 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiConfigRestful.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiConfigRestful.java
@@ -95,7 +95,7 @@ public Message getApiList(HttpServletRequest httpServletRequest,
@RequestParam(value = "workspaceId", required = false) String workspaceId) {
Workspace workspace = SSOHelper.getWorkspace(httpServletRequest);
- List list = apiConfigService.getGroupList(workspace.getWorkspaceName());
+ List list = apiConfigService.getGroupList(workspaceId);
Message message = Message.ok().data("list", list);
return message;
}
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiDataSourceRestful.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiDataSourceRestful.java
index 2da04f30ab..19457549bd 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiDataSourceRestful.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiDataSourceRestful.java
@@ -34,7 +34,7 @@ public class DSSDbApiDataSourceRestful {
public Message connect(HttpServletRequest httpServletRequest, @RequestParam(value = "workspaceId", required = false) Integer workspaceId,
@RequestParam("type") String type) {
Workspace workspace = SSOHelper.getWorkspace(httpServletRequest);
- List allConnections = dssDbApiDataSourceService.getAllConnections(Integer.valueOf(workspace.getWorkspaceName()), type);
+ List allConnections = dssDbApiDataSourceService.getAllConnections(workspaceId, type);
return Message.ok().data("availableConns", allConnections);
}
@@ -75,7 +75,7 @@ public Message getAllDs(HttpServletRequest httpServletRequest,
@RequestParam("type") String type, @RequestParam("name") String name) {
Workspace workspace = SSOHelper.getWorkspace(httpServletRequest);
DataSource dataSource = new DataSource();
- dataSource.setWorkspaceId(Integer.valueOf(workspace.getWorkspaceName()));
+ dataSource.setWorkspaceId(workspaceId);
dataSource.setType(type);
dataSource.setName(name);
List allDatasource = dssDbApiDataSourceService.listAllDatasources(dataSource);
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiManagerRestful.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiManagerRestful.java
index 0c25c3432f..05a39e1ada 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiManagerRestful.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiManagerRestful.java
@@ -33,7 +33,7 @@ public Message getApiList(HttpServletRequest request,
}
Workspace workspace = SSOHelper.getWorkspace(request);
List totals = new ArrayList<>();
- List apiInfoList = apiManagerService.getApiInfoList(Long.valueOf(workspace.getWorkspaceName())
+ List apiInfoList = apiManagerService.getApiInfoList(workspaceId
, apiName, totals, pageNow, pageSize);
return Message.ok().data("list", apiInfoList).data("total", totals.get(0));
}
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiMonitorRestful.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiMonitorRestful.java
index ee1d9cd8da..9b514450cf 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiMonitorRestful.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/restful/DSSDbApiMonitorRestful.java
@@ -45,7 +45,7 @@ public Message getApiList(HttpServletRequest request,
Workspace workspace = SSOHelper.getWorkspace(request);
LOGGER.info("workspace is: {}", workspace.getWorkspaceName());
List totals = new ArrayList<>();
- List apiInfoList = apiManagerService.getOnlineApiInfoList(Long.valueOf(workspace.getWorkspaceName()),
+ List apiInfoList = apiManagerService.getOnlineApiInfoList(workspaceId,
apiName, totals, pageNow, pageSize);
return Message.ok().data("list", apiInfoList).data("total", totals.get(0));
}
@@ -55,7 +55,7 @@ public Message getApiList(HttpServletRequest request,
public Message getOnlineApiCnt(HttpServletRequest request, @RequestParam(value = "workspaceId", required = false) Long workspaceId) {
Workspace workspace = SSOHelper.getWorkspace(request);
LOGGER.info("workspace is: {}", workspace.getWorkspaceName());
- return Message.ok().data("onlineApiCnt", apiMonitorService.getOnlineApiCnt(Long.valueOf(workspace.getWorkspaceName())));
+ return Message.ok().data("onlineApiCnt", apiMonitorService.getOnlineApiCnt(workspaceId));
}
@@ -63,7 +63,7 @@ public Message getOnlineApiCnt(HttpServletRequest request, @RequestParam(value =
public Message getOfflineApiCnt(HttpServletRequest request, @RequestParam(value = "workspaceId", required = false) Long workspaceId) {
Workspace workspace = SSOHelper.getWorkspace(request);
LOGGER.info("workspace is: {}", workspace.getWorkspaceName());
- return Message.ok().data("offlineApiCnt", apiMonitorService.getOfflineApiCnt(Long.valueOf(workspace.getWorkspaceName())));
+ return Message.ok().data("offlineApiCnt", apiMonitorService.getOfflineApiCnt(workspaceId));
}
@@ -99,7 +99,7 @@ public Message getCallCntForPast24H(HttpServletRequest request, @RequestParam(va
throws Exception {
Workspace workspace = SSOHelper.getWorkspace(request);
LOGGER.info("workspace is: {}", workspace.getWorkspaceName());
- return Message.ok().data("list", apiMonitorService.getCallCntForPast24H(Long.valueOf(workspace.getWorkspaceName())));
+ return Message.ok().data("list", apiMonitorService.getCallCntForPast24H(workspaceId));
}
/**
diff --git a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/util/CryptoUtils.java b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/util/CryptoUtils.java
index 0dc82b1c67..f795d53f0a 100644
--- a/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/util/CryptoUtils.java
+++ b/dss-apps/dss-data-api/dss-data-api-server/src/main/java/com/webank/wedatasphere/dss/data/api/server/util/CryptoUtils.java
@@ -8,7 +8,7 @@
import java.io.Serializable;
import java.security.MessageDigest;
import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
public class CryptoUtils {
private CryptoUtils() {
diff --git a/dss-apps/dss-data-api/pom.xml b/dss-apps/dss-data-api/pom.xml
index 957a6ca8f0..05088d6f26 100644
--- a/dss-apps/dss-data-api/pom.xml
+++ b/dss-apps/dss-data-api/pom.xml
@@ -6,7 +6,7 @@
dss
com.webank.wedatasphere.dss
../../pom.xml
- 1.1.0
+ 1.2.0
4.0.0
dss-data-api
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/pom.xml b/dss-apps/dss-data-governance/dss-data-asset-server/pom.xml
deleted file mode 100644
index 88f0ec3fb1..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/pom.xml
+++ /dev/null
@@ -1,223 +0,0 @@
-
-
-
- dss
- com.webank.wedatasphere.dss
- 1.1.0
- ../../../pom.xml
-
- 4.0.0
-
- dss-data-asset-server
-
-
- 2.1.0
-
-
-
- com.webank.wedatasphere.dss
- dss-data-governance-common
- ${dss.version}
-
-
-
- org.apache.linkis
- linkis-module
- ${linkis.version}
- provided
-
-
- org.springframework.cloud
- spring-cloud-netflix
-
-
- spring-cloud-starter-netflix-eureka-client
- org.springframework.cloud
-
-
- javax.ws.rs
- javax.ws.rs-api
-
-
-
-
- org.apache.commons
- commons-math3
- provided
-
-
- xstream
- com.thoughtworks.xstream
-
-
- org.springframework.cloud
- spring-cloud-starter-netflix-eureka-client
- ${spring.cloud.version}
- provided
-
-
- logback-classic
- ch.qos.logback
-
-
- log4j-to-slf4j
- org.apache.logging.log4j
-
-
- gson
- com.google.code.gson
-
-
- jsr311-api
- javax.ws.rs
-
-
- xstream
- com.thoughtworks.xstream
-
-
- commons-math
- org.apache.commons
-
-
- jackson-core
- com.fasterxml.jackson.core
-
-
- spring-boot-autoconfigure
- org.springframework.boot
-
-
- spring-boot-starter-aop
- org.springframework.boot
-
-
- spring-boot-starter
- org.springframework.boot
-
-
- spring-boot-starter-cache
- org.springframework.boot
-
-
-
-
- org.apache.linkis
- linkis-mybatis
- ${linkis.version}
-
-
- com.webank.wedatasphere.dss
- dss-common
- ${dss.version}
- provided
-
-
-
- org.apache.atlas
- atlas-client-v2
- ${atlas.version}
-
-
- com.google.guava
- guava
-
-
- com.fasterxml.jackson.core
- jackson-databind
-
-
- com.fasterxml.jackson.core
- jackson-core
-
-
-
-
- org.projectlombok
- lombok
- 1.18.16
- compile
-
-
-
- com.alibaba
- druid
- 1.1.9
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-deploy-plugin
-
-
-
- net.alchim31.maven
- scala-maven-plugin
-
-
- org.apache.maven.plugins
- maven-jar-plugin
-
-
- org.apache.maven.plugins
- maven-assembly-plugin
- 2.3
- false
-
-
- make-assembly
- package
-
- single
-
-
-
- src/main/assembly/distribution.xml
-
-
-
-
-
- false
- out
- false
- false
-
- src/main/assembly/distribution.xml
-
-
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
-
-
- 8
-
-
-
-
-
- src/main/java
-
- **/*.xml
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/MetaInfoMapper.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/MetaInfoMapper.java
deleted file mode 100644
index bb2b0ff5a1..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/MetaInfoMapper.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.dao;
-
-import com.webank.wedatasphere.dss.data.asset.entity.HivePartInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveStorageInfo;
-import org.apache.ibatis.annotations.*;
-
-import java.sql.SQLException;
-import java.util.List;
-
-@Mapper
-public interface MetaInfoMapper {
- Long getTableStorage() throws SQLException;
- List getTop10Table() throws SQLException;
- int getTableInfo(@Param("dbName") String dbName,@Param("tableName") String tableName,@Param("isPartTable") Boolean isPartTable) throws SQLException;
-
- List getPartInfo(@Param("dbName") String dbName, @Param("tableName") String tableName) throws SQLException;
-
-
-
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/impl/MetaInfoMapperImpl.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/impl/MetaInfoMapperImpl.java
deleted file mode 100644
index 72989801d2..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/impl/MetaInfoMapperImpl.java
+++ /dev/null
@@ -1,174 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.dao.impl;
-
-import com.webank.wedatasphere.dss.data.asset.dao.MetaInfoMapper;
-import com.webank.wedatasphere.dss.data.asset.entity.HivePartInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveStorageInfo;
-import com.webank.wedatasphere.dss.data.common.exception.DAOException;
-import com.webank.wedatasphere.dss.data.common.utils.DataSourceUtil;
-import com.webank.wedatasphere.dss.data.common.utils.DateUtil;
-
-import javax.sql.DataSource;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-public class MetaInfoMapperImpl implements MetaInfoMapper {
- @Override
- public Long getTableStorage() throws SQLException {
- DataSource dataSource = DataSourceUtil.getDataSource();
-
- Connection con =dataSource.getConnection();
- long num=0;
- PreparedStatement ps=null;
- ResultSet rs=null;
- try {
- String sql="select SUM(PARAM_VALUE) from TABLE_PARAMS WHERE PARAM_KEY='totalSize'";
- ps=con.prepareStatement(sql);
- rs=ps.executeQuery();
- while (rs.next()){
- num =rs.getLong(1);
- }
- String sql2 ="select SUM(PARAM_VALUE) from PARTITION_PARAMS WHERE PARAM_KEY='totalSize'";
- ps=con.prepareStatement(sql2);
- rs=ps.executeQuery();
- while (rs.next()){
- num= num + rs.getLong(1);
- }
-
- } catch (DAOException | SQLException e){
- throw new DAOException(e.getMessage(),e);
- }
- finally {
- con.close();
- }
-
- return num;
- }
-
- @Override
- public List getTop10Table() throws SQLException{
- DataSource dataSource = DataSourceUtil.getDataSource();
- Connection con =dataSource.getConnection();
- PreparedStatement ps=null;
- ResultSet rs=null;
- List hiveStorageInfos = new ArrayList<>();
- try {
- String sql="SELECT DBS.NAME ,TBLS.TBL_NAME,CAST(TABLE_PARAMS.PARAM_VALUE AS UNSIGNED) AS totalSize from DBS, TBLS,TABLE_PARAMS where TBLS.TBL_ID=TABLE_PARAMS.TBL_ID AND TBLS.DB_ID=DBS.DB_ID AND TABLE_PARAMS.PARAM_KEY='totalSize' order by totalSize DESC limit 10";
- ps=con.prepareStatement(sql);
- rs=ps.executeQuery();
- while (rs.next()){
- HiveStorageInfo tableinfo=new HiveStorageInfo();
- tableinfo.setTableName(rs.getString(1)+"."+rs.getString(2));
- tableinfo.setStorage(rs.getLong(3));
- hiveStorageInfos.add(tableinfo);
- }
- String sql2="SELECT DBS.NAME ,TBLS.TBL_NAME,SUM(CAST(PARTITION_PARAMS.PARAM_VALUE AS UNSIGNED)) AS totalSize from DBS,TBLS,PARTITIONS ,PARTITION_PARAMS where DBS.DB_ID=TBLS.DB_ID AND TBLS.TBL_ID=PARTITIONS.TBL_ID AND PARTITIONS.PART_ID =PARTITION_PARAMS.PART_ID AND PARTITION_PARAMS.PARAM_KEY='totalSize' group by TBLS.TBL_NAME order by totalSize desc limit 10";
- ps=con.prepareStatement(sql2);
- rs=ps.executeQuery();
- while (rs.next()){
- HiveStorageInfo tableinfo=new HiveStorageInfo();
- tableinfo.setTableName(rs.getString(1)+"."+rs.getString(2));
- tableinfo.setStorage(rs.getLong(3));
- hiveStorageInfos.add(tableinfo);
- }
- /**
- * 特别注意LONG类型相减超出INT范围
- * System.out.println((int) (4401131805L -1796673800L))
- * System.out.println(Long.parseLong("4401131805")-Long.parseLong("1796673800"))
- */
- Collections.sort(hiveStorageInfos, new Comparator() {
- @Override
- public int compare(HiveStorageInfo o1, HiveStorageInfo o2) {
- //return (int) (Long.valueOf(o2.getStorage())-Long.valueOf(o1.getStorage()))
- if(o2.getStorage() > o1.getStorage()){
- return 1;
- }
- else if(o2.getStorage() < o1.getStorage()){
- return -1;
- }
- else{
- return 0;
- }
- }
- });
- } catch (DAOException | SQLException e){
- throw new DAOException(e.getMessage(),e);
- }
- finally {
- con.close();
- }
- return hiveStorageInfos.subList(0,10);
- }
-
- @Override
- public int getTableInfo(String dbName, String tableName, Boolean isPartTable) throws SQLException {
- DataSource dataSource = DataSourceUtil.getDataSource();
- Connection con =dataSource.getConnection();
- PreparedStatement ps=null;
- ResultSet rs=null;
- int res = 0;
- try {
- String sql=null;
- if(isPartTable==false){
- sql="select TABLE_PARAMS.PARAM_VALUE as totalSize from DBS, TBLS,TABLE_PARAMS where TBLS.TBL_ID=TABLE_PARAMS.TBL_ID AND TBLS.DB_ID=DBS.DB_ID AND TABLE_PARAMS.PARAM_KEY='totalSize' AND DBS.NAME="+"'"+dbName+"' AND TBLS.TBL_NAME="+"'"+tableName+"'";
- }
- else {
-
- sql="select SUM(PARTITION_PARAMS.PARAM_VALUE) as totalSize from DBS,TBLS,PARTITIONS ,PARTITION_PARAMS where DBS.DB_ID=TBLS.DB_ID AND TBLS.TBL_ID=PARTITIONS.TBL_ID AND PARTITIONS.PART_ID =PARTITION_PARAMS.PART_ID AND PARTITION_PARAMS.PARAM_KEY='totalSize' AND DBS.NAME="+"'"+dbName +"' AND TBLS.TBL_NAME="+"'"+tableName+"' group by TBLS.TBL_NAME";
- }
- ps=con.prepareStatement(sql);
- rs=ps.executeQuery();
- while (rs.next()){
- res=rs.getInt(1);
- }
-
- } catch (DAOException | SQLException e){
- throw new DAOException(e.getMessage(),e);
- }
- finally {
- con.close();
- }
- return res;
- }
-
- @Override
- public List getPartInfo(String dbName, String tableName)throws SQLException {
- DataSource dataSource = DataSourceUtil.getDataSource();
- Connection con =dataSource.getConnection();
- PreparedStatement ps=null;
- ResultSet rs=null;
- List hivePartInfos = new ArrayList<>();
- try {
- String sql="select b.PART_NAME,b.CREATE_TIME,MAX(CASE c.PARAM_KEY WHEN 'transient_lastDdlTime' THEN c.PARAM_VALUE ELSE null END) transient_lastDdlTime ,MAX(CASE c.PARAM_KEY WHEN 'numRows' THEN c.PARAM_VALUE ELSE null END) numRows,MAX(CASE c.PARAM_KEY WHEN 'totalSize' THEN c.PARAM_VALUE ELSE null END) totalSize from TBLS a,PARTITIONS b,PARTITION_PARAMS c,DBS d where a.TBL_NAME="+"'"+tableName+"'"+"AND d.NAME="+"'"+dbName+"'" +"AND a.TBL_ID=b.TBL_ID AND a.DB_ID=d.DB_ID AND b.PART_ID=c.PART_ID GROUP BY c.PART_ID";
- ps=con.prepareStatement(sql);
- rs=ps.executeQuery();
- while (rs.next()){
- HivePartInfo part =new HivePartInfo();
- part.setPartName(rs.getString(1));
- Long lastAccessTime = Long.valueOf(rs.getInt(3));
- if(lastAccessTime !=null && lastAccessTime !=0L) {
- part.setLastAccessTime(DateUtil.unixToTimeStr(lastAccessTime * 1000));
- }
- Long createTime = Long.valueOf(rs.getInt(2));
- if(createTime !=null && createTime !=0L) {
- part.setCreateTime(DateUtil.unixToTimeStr(createTime * 1000));
- }
- part.setReordCnt(rs.getInt(4));
- part.setStore(rs.getInt(5));
- hivePartInfos.add(part);
- }
-
- } catch (DAOException | SQLException e){
- throw new DAOException(e.getMessage(),e);
- }
- finally {
- con.close();
- }
- return hivePartInfos;
- }
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/impl/WorkspaceInfoMapper.xml b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/impl/WorkspaceInfoMapper.xml
deleted file mode 100644
index 8dcfde4e33..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/dao/impl/WorkspaceInfoMapper.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveStorageInfo.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveStorageInfo.java
deleted file mode 100644
index 75d60bbb5f..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveStorageInfo.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.entity;
-
-import lombok.Data;
-
-@Data
-public class HiveStorageInfo {
- private String tableName;
- private Long storage;
- private String guid;
-}
\ No newline at end of file
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblClassificationInfo.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblClassificationInfo.java
deleted file mode 100644
index 7977271e83..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblClassificationInfo.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.entity;
-
-import lombok.Data;
-
-import java.util.List;
-
-@Data
-public class HiveTblClassificationInfo {
- private List oldClassifications;
- private List newClassifications;
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblDetailInfo.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblDetailInfo.java
deleted file mode 100644
index 1204e2e9b0..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblDetailInfo.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.entity;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-
-import java.io.Serializable;
-import java.util.List;
-import java.util.Set;
-
-@Data
-public class HiveTblDetailInfo implements Serializable {
- private HiveTblBasicInfo basic;
- private List columns;
- private List partitionKeys;
- private List classifications;
-
- @Data
- public static class HiveTblBasicInfo extends HiveTblSimpleInfo {
- private String store; //存储量
- private Boolean isParTbl; //是否分区表
- private String tableType; //Hive表类型 tableType: EXTERNAL_TABLE, MANAGED_TABLE
- private String location; //Hive表存储路径
- }
-
- @Data
- public static class HiveColumnInfo {
- private String name;
- private String type;
- private String guid;
- private String comment;
- }
-
- @Data
- @AllArgsConstructor
- public static class HiveClassificationInfo {
- private String typeName;
- private Set superTypeNames;
- private Set subTypeNames;
- }
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblLabelInfo.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblLabelInfo.java
deleted file mode 100644
index 5a4be4a35f..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblLabelInfo.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.entity;
-
-import lombok.Data;
-
-import java.util.Set;
-
-@Data
-public class HiveTblLabelInfo {
- private Set labels;
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetRestful.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetRestful.java
deleted file mode 100644
index 44c7dbebaa..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetRestful.java
+++ /dev/null
@@ -1,91 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.restful;
-
-import com.webank.wedatasphere.dss.data.asset.entity.HiveTblLabelInfo;
-import com.webank.wedatasphere.dss.data.asset.service.AssetService;
-import com.webank.wedatasphere.dss.data.asset.service.WorkspaceInfoService;
-import org.apache.linkis.server.Message;
-import lombok.AllArgsConstructor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
-
-import java.util.List;
-import java.util.Map;
-
-@RestController
-@RequestMapping(path = "/dss/data/governance/asset", produces = {"application/json"})
-@AllArgsConstructor
-public class AssetRestful {
- private static final Logger logger = LoggerFactory.getLogger(AssetRestful.class);
-
- private AssetService assetService;
- private WorkspaceInfoService workspaceInfoService;
-
- /**
- * 获取数据资产概要:hivedb数、hivetable数据、总存储量
- */
- @RequestMapping(method = RequestMethod.GET, path ="/hiveSummary")
- public Message getHiveSummary() throws Exception {
- return Message.ok().data("result", assetService.getHiveSummary());
- }
-
- /**
- * 修改单个表或单个列注释
- */
- @RequestMapping(method = RequestMethod.PUT, path ="/comment/{guid}")
- public Message modifyComment(@PathVariable String guid, @RequestParam String comment) throws Exception {
- comment="\""+comment+"\"";
- assetService.modifyComment(guid,comment);
- return Message.ok().data("result","修改成功");
- }
-
- /**
- * 批量修改多个个表或列注释
- */
- @RequestMapping(method = RequestMethod.PUT, path ="/comment/bulk")
- public Message modifyComment(@RequestBody Map commentMap) throws Exception {
- for (Map.Entry stringStringEntry : commentMap.entrySet()) {
- stringStringEntry.setValue("\""+stringStringEntry.getValue()+"\"");
- }
- assetService.bulkModifyComment(commentMap);
-
- return Message.ok().data("result","修改成功");
- }
-
-
- /**
- * 设置单个表或单个列的标签
- */
- @RequestMapping(method = RequestMethod.POST, path ="/label/{guid}")
- public Message setLabels(@PathVariable String guid, @RequestBody HiveTblLabelInfo hiveTblLabelInfo) throws Exception {
- assetService.setLabels(guid,hiveTblLabelInfo.getLabels());
-
- return Message.ok().data("result","设置成功");
- }
-
- /**
- * 删除单个表或单个列的标签,linkis-gateway不支持DELETE方法
- */
- @RequestMapping(method = RequestMethod.PUT, path ="/label/{guid}")
- public Message removeLabels(@PathVariable String guid, @RequestBody HiveTblLabelInfo hiveTblLabelInfo) throws Exception {
- assetService.removeLabels(guid,hiveTblLabelInfo.getLabels());
-
- return Message.ok().data("result","删除成功");
- }
-
- /**
- * 获取工作空间下所有用户名
- */
- @RequestMapping(method = RequestMethod.GET, path ="getWorkspaceUsers/{workspaceId}/{search}")
- public Message getWorkspaceUsers(@PathVariable int workspaceId,@PathVariable String search) throws Exception{
- String searchs="%"+search+"%";
- List workspaceUsers = workspaceInfoService.getWorkspaceUsers(workspaceId,searchs);
- return Message.ok().data("result",workspaceUsers);
-
- }
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetTblRestful.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetTblRestful.java
deleted file mode 100644
index e740397533..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetTblRestful.java
+++ /dev/null
@@ -1,187 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.restful;
-
-import com.webank.wedatasphere.dss.data.asset.entity.HivePartInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveStorageInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveTblClassificationInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveTblSimpleInfo;
-import com.webank.wedatasphere.dss.data.asset.service.AssetService;
-import com.webank.wedatasphere.dss.data.asset.service.WorkspaceInfoService;
-import org.apache.linkis.server.Message;
-import lombok.AllArgsConstructor;
-import org.apache.atlas.model.instance.AtlasClassification;
-import org.apache.atlas.model.lineage.AtlasLineageInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
-@RestController
-@RequestMapping(path = "/dss/data/governance/asset/hiveTbl", produces = {"application/json"})
-@AllArgsConstructor
-public class AssetTblRestful {
- private static final Logger logger = LoggerFactory.getLogger(AssetTblRestful.class);
-
- private static final String DEFAULT_DIRECTION = "BOTH";
- private static final String DEFAULT_DEPTH = "3";
- private static final String DEFAULT_LIMIT = "10";
- private static final String DEFAULT_OFFSET = "0";
-
- private AssetService assetService;
- private WorkspaceInfoService workspaceInfoService;
-
- /**
- * 获取存储量前10的表信息
- */
- @RequestMapping(method = RequestMethod.GET, path ="/topStorage")
- public Message getTop10Storage() throws Exception{
- List top10Table = assetService.getTop10Table();
- for (HiveStorageInfo hiveStorageInfo : top10Table) {
- String qualifiedName=hiveStorageInfo.getTableName();
- String hiveTblGuid = assetService.getHiveTblGuid(qualifiedName);
- hiveStorageInfo.setGuid(hiveTblGuid);
- }
- return Message.ok().data("result",top10Table);
- }
-
- /**
- * 搜索hive表
- */
- @RequestMapping(method = RequestMethod.GET, path ="/search")
- public Message searchHiveTbl(@RequestParam(required = false) String classification,
- @RequestParam(defaultValue = "") String query,
- @RequestParam(defaultValue = "") String keyword,
- @RequestParam(defaultValue = DEFAULT_LIMIT) int limit,
- @RequestParam(defaultValue = DEFAULT_OFFSET) int offset) throws Exception {
- List hiveTblSimpleInfoList = assetService.searchHiveTable(classification,query.trim(),limit,offset);
- if(hiveTblSimpleInfoList ==null || keyword ==null || keyword.trim().equals("")) {
- return Message.ok().data("result",hiveTblSimpleInfoList);
- }
- else {
- Pattern regex = Pattern.compile(keyword);
- return Message.ok().data("result",hiveTblSimpleInfoList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
- }
- }
-
- /**
- * 获取单个表的详细信息,包括:基本信息、字段信息
- */
- @RequestMapping(method = RequestMethod.GET, path ="/{guid}/basic")
- public Message getHiveTblBasic(@PathVariable String guid) throws Exception {
- return Message.ok().data("result",assetService.getHiveTblDetail(guid));
- }
-
- /**
- * 获取表分区信息
- */
- @RequestMapping(method = RequestMethod.GET, path ="/{guid}/partition")
- public Message getHiveTblPartition(@PathVariable String guid) throws Exception {
- List hiveTblPartition = assetService.getHiveTblPartition(guid);
- if (hiveTblPartition.size()>0){
- return Message.ok().data("result",hiveTblPartition);
- }
- else {
- return Message.ok().data("result",null);
- }
- }
-
- /**
- * 获取表的血缘信息
- */
- @RequestMapping(method = RequestMethod.GET, path ="/{guid}/lineage")
- public Message getHiveTblLineage(@PathVariable String guid,
- @RequestParam(defaultValue = DEFAULT_DIRECTION) AtlasLineageInfo.LineageDirection direction,
- @RequestParam(defaultValue = DEFAULT_DEPTH) int depth) throws Exception {
- return Message.ok().data("result",assetService.getHiveTblLineage(guid,direction,depth));
- }
-
- /**
- * 获取表的select语句
- */
- @RequestMapping(method = RequestMethod.GET, path ="/{guid}/select")
- public Message getHiveTblSelect(@PathVariable String guid) throws Exception {
- return Message.ok().data("result",assetService.getTbSelect(guid));
- }
-
- /**
- * 获取表的create语句
- */
- @RequestMapping(method = RequestMethod.GET, path ="/{guid}/create")
- public Message getHiveTblCreate(@PathVariable String guid) throws Exception {
- return Message.ok().data("result",assetService.getTbCreate(guid));
-
- }
-
- /**
- * 获取分类
- */
- @RequestMapping(method = RequestMethod.GET, path ="/{guid}/classifications")
- public Message getClassifications(@PathVariable String guid) throws Exception {
- return Message.ok().data("result",assetService.getClassifications(guid));
- }
-
- /**
- * 添加分类
- */
- @Deprecated
- @RequestMapping(method = RequestMethod.POST, path ="/{guid}/classifications")
- public Message addClassifications(@PathVariable String guid, @RequestBody List classifications) throws Exception {
- assetService.addClassifications(guid, classifications);
- return Message.ok().data("result","添加成功");
- }
-
-// /**
-// * 删除已有全部旧分类,并添加新分类
-// * linkis-gateway无法正常转换json为list
-// * [{"typeName": "test"},{"typeName": "DWD"}] ---> List classifications
-// * ["test","DWD"] ---> List typeNames
-// */
-// @RequestMapping(method = RequestMethod.PUT, path ="/{guid}/classifications")
-// public Message removeAndAddNewClassifications(@PathVariable String guid, @RequestBody List classifications) throws Exception {
-// assetService.removeAndAddClassifications(guid, classifications);
-//
-// return Message.ok().data("result","更新成功");
-// }
-
- /**
- * 删除已有全部旧分类,并添加新分类
- * 支持 {"newClassifications":["test","DWD"]} 非顶层的List数组转换
- */
- @RequestMapping(method = RequestMethod.PUT, path ="/{guid}/classifications")
- public Message removeAndAddClassifications(@PathVariable String guid, @RequestBody HiveTblClassificationInfo hiveTblClassificationInfo) throws Exception {
- List newClassifications = new ArrayList<>();
- Optional.ofNullable(hiveTblClassificationInfo.getNewClassifications()).orElseGet(()-> {
- logger.warn("hive table uid is %s, newClassifications is null",guid);
- return new ArrayList<>();
- }).stream().filter(Objects::nonNull).forEach(typeName -> {
- AtlasClassification atlasClassification =new AtlasClassification(typeName);
- atlasClassification.setPropagate(false);
- atlasClassification.setRemovePropagationsOnEntityDelete(true);
- newClassifications.add(atlasClassification);
- });
- assetService.removeAndAddClassifications(guid, newClassifications);
-
- return Message.ok().data("result","更新成功");
- }
-
- /**
- * 删除分类
- * @DELETE linkis-gateway 不支持DELETE方式
- */
- @RequestMapping(method = RequestMethod.POST, path ="/{guid}/classification/{classificationName}")
- public Message deleteClassification(@PathVariable String guid, @PathVariable final String classificationName) throws Exception {
- assetService.deleteClassification(guid, classificationName);
-
- return Message.ok().data("result","删除成功");
- }
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/AssetService.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/AssetService.java
deleted file mode 100644
index d42190e277..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/AssetService.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.service;
-
-import com.webank.wedatasphere.dss.data.asset.entity.*;
-import com.webank.wedatasphere.dss.data.common.exception.DataGovernanceException;
-import com.webank.wedatasphere.dss.data.common.atlas.AtlasClassificationV2.AtlasClassificationsV2;
-import org.apache.atlas.model.instance.AtlasClassification;
-import org.apache.atlas.model.lineage.AtlasLineageInfo;
-
-import java.sql.SQLException;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public interface AssetService {
- public Map getHiveSummary() throws DataGovernanceException;
-
- public List searchHiveTable(String classification, String query,
- int limit, int offset) throws DataGovernanceException;
-
- public HiveTblDetailInfo getHiveTblDetail(String guid) throws DataGovernanceException;
-
- public List getHiveTblPartition(String guid) throws DataGovernanceException;
-
- public String getHiveTblGuid(String qualifiedName) throws DataGovernanceException;
-
- public String getTbSelect(String guid) throws DataGovernanceException;
-
- public String getTbCreate(String guid) throws DataGovernanceException;
-
- public void modifyComment(String guid, String commentStr) throws DataGovernanceException;
-
- public void bulkModifyComment(Map commentMap) throws DataGovernanceException;
-
- public void setLabels(String guid, Set labels) throws DataGovernanceException;
-
- public void removeLabels(String guid, Set labels) throws DataGovernanceException;
-
- /**
- * 获取表实体的血缘信息
- */
- public AtlasLineageInfo getHiveTblLineage(final String guid, final AtlasLineageInfo.LineageDirection direction, final int depth) throws DataGovernanceException;
-
- public List getTop10Table() throws DataGovernanceException, SQLException;
-
- public void addClassifications(String guid, List classifications) throws DataGovernanceException;
-
- public void deleteClassification(String guid, String classificationName) throws DataGovernanceException;
-
- public void deleteClassifications(String guid, List classifications) throws DataGovernanceException;
-
- public void updateClassifications(String guid, List classifications) throws DataGovernanceException;
-
- /**
- * 为实体删除已有的分类,添加新的分类
- */
- public void removeAndAddClassifications(String guid, List newClassifications) throws DataGovernanceException;
-
- public AtlasClassificationsV2 getClassifications(String guid) throws DataGovernanceException;
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/WorkspaceInfoService.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/WorkspaceInfoService.java
deleted file mode 100644
index b218280262..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/WorkspaceInfoService.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.service;
-
-
-import com.webank.wedatasphere.dss.data.common.exception.DataGovernanceException;
-
-
-import java.util.List;
-
-public interface WorkspaceInfoService {
-
- public List getWorkspaceUsers(int workspaceId,String search) throws DataGovernanceException;
-
-}
diff --git a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/impl/AssetServiceImpl.java b/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/impl/AssetServiceImpl.java
deleted file mode 100644
index 31bf580ffc..0000000000
--- a/dss-apps/dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/impl/AssetServiceImpl.java
+++ /dev/null
@@ -1,518 +0,0 @@
-package com.webank.wedatasphere.dss.data.asset.service.impl;
-
-import com.google.gson.internal.LinkedTreeMap;
-import com.webank.wedatasphere.dss.data.asset.dao.MetaInfoMapper;
-import com.webank.wedatasphere.dss.data.asset.dao.impl.MetaInfoMapperImpl;
-import com.webank.wedatasphere.dss.data.asset.entity.HivePartInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveStorageInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveTblDetailInfo;
-import com.webank.wedatasphere.dss.data.asset.entity.HiveTblSimpleInfo;
-import com.webank.wedatasphere.dss.data.asset.service.AssetService;
-import com.webank.wedatasphere.dss.data.common.atlas.AtlasClassificationV2;
-import com.webank.wedatasphere.dss.data.common.atlas.AtlasService;
-import com.webank.wedatasphere.dss.data.common.conf.AtlasConf;
-import com.webank.wedatasphere.dss.data.common.exception.DAOException;
-import com.webank.wedatasphere.dss.data.common.exception.DataGovernanceException;
-import com.webank.wedatasphere.dss.data.common.utils.DateUtil;
-import org.apache.atlas.AtlasServiceException;
-import org.apache.atlas.model.instance.AtlasClassification;
-import org.apache.atlas.model.instance.AtlasEntity;
-import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.model.instance.AtlasRelatedObjectId;
-import org.apache.atlas.model.lineage.AtlasLineageInfo;
-import org.apache.atlas.model.typedef.AtlasClassificationDef;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
-@Service
-public class AssetServiceImpl implements AssetService {
- private static final Logger logger = LoggerFactory.getLogger(AssetServiceImpl.class);
-
- private AtlasService atlasService;
- private MetaInfoMapper metaInfoMapper;
-
- public AssetServiceImpl(AtlasService atlasService) {
- this.atlasService = atlasService;
- this.metaInfoMapper = new MetaInfoMapperImpl();
- }
-
- @Override
- public Map getHiveSummary() throws DataGovernanceException {
- try {
- Map result = new HashMap<>();
-
- result.put("hiveDb", atlasService.getHiveDbCnt());
- result.put("hiveTable", atlasService.getHiveTableCnt());
- result.put("hiveStore", metaInfoMapper.getTableStorage());
-
- return result;
- } catch (AtlasServiceException | DAOException | SQLException exception) {
- throw new DataGovernanceException(exception.getMessage());
- }
- }
-
- @Override
- public List searchHiveTable(String classification, String query,
- int limit, int offset) throws DataGovernanceException {
- List atlasEntityHeaders = null;
- try {
- atlasEntityHeaders = atlasService.searchHiveTable(classification, "*" + query + "*", true, limit, offset);
- } catch (AtlasServiceException ex) {
- throw new DataGovernanceException(ex.getMessage());
- }
-
- if (atlasEntityHeaders != null) {
- //columns 根据keyword来正则匹配过滤
- Pattern regex = Pattern.compile(query);
- return atlasEntityHeaders.parallelStream().filter(Objects::nonNull).map(atlasEntityHeader -> {
- HiveTblSimpleInfo hiveTblSimpleInfo = new HiveTblSimpleInfo();
- hiveTblSimpleInfo.setGuid(atlasEntityHeader.getGuid());
- hiveTblSimpleInfo.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
- String qualifiedName =stringValueOfObject(atlasEntityHeader.getAttribute("qualifiedName"));
- hiveTblSimpleInfo.setQualifiedName(qualifiedName);
- hiveTblSimpleInfo.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
- Object createTime = atlasEntityHeader.getAttribute("createTime");
- if (createTime != null) {
- hiveTblSimpleInfo.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
- }
- if(null != qualifiedName && qualifiedName.split("\\.").length >0){
- String dbName = qualifiedName.split("\\.")[0];
- hiveTblSimpleInfo.setDbName(dbName);
- }
- hiveTblSimpleInfo.setLabels(atlasEntityHeader.getLabels());
-
- try {
- AtlasEntity atlasEntity = atlasService.getHiveTblByGuid(atlasEntityHeader.getGuid());
-
- //comment
- hiveTblSimpleInfo.setComment(stringValueOfObject(atlasEntity.getAttribute("comment")));
- List