diff --git a/.gitignore b/.gitignore
index aa5f9b2526..983d6454a0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -101,6 +101,9 @@ dss-apps/dss-data-governance/dss-data-warehouse-dao/target
dss-apps/dss-data-governance/dss-data-warehouse-service/target
dss-apps/dss-data-governance/dss-data-warehouse-server/target
+#dss-git
+dss-git/dss-git-common/target
+dss-git/dss-git-server/target
# plugins
plugins/azkaban/linkis-jobtype/target
diff --git a/README-ZH.md b/README-ZH.md
index 0d04b86795..9e4da69b51 100644
--- a/README-ZH.md
+++ b/README-ZH.md
@@ -170,7 +170,7 @@
您也可以扫描下面的二维码,加入我们的 微信群,以获得更快速的响应。
-
+
## 谁在使用 DSS
diff --git a/README.md b/README.md
index 31bb3436b8..e59fbdc683 100644
--- a/README.md
+++ b/README.md
@@ -168,7 +168,7 @@ English | [中文](README-ZH.md)
You can scan the QR code below to join our WeChat to get more immediate response.
-
+
## Who is using DSS
diff --git a/assembly/bin/appconn-refresh.sh b/assembly/bin/appconn-refresh.sh
index 1a648adabb..37df8746fb 100644
--- a/assembly/bin/appconn-refresh.sh
+++ b/assembly/bin/appconn-refresh.sh
@@ -22,7 +22,8 @@ function isSuccess(){
function refresh() {
while true; do
- response=$(curl -H "Token-Code:" -H "Token-User:hadoop" -X GET http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}/api/rest_j/v1/dss/framework/project/appconn/${APPCONN_NAME}/load)
+ response=$(curl -H "Token-Code:xxx" -H "Token-User:hadoop" -X GET \
+ http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}/api/rest_j/v1/dss/framework/project/appconn/${APPCONN_NAME}/load)
if [[ $response == *"not appconn manager node"* ]]; then
echo 'not appconn manager node, we will try again 5 seconds later'
sleep 5
diff --git a/assembly/bin/dssinstall.sh b/assembly/bin/dssinstall.sh
index 1fb1e4b3ab..157ebd72a7 100644
--- a/assembly/bin/dssinstall.sh
+++ b/assembly/bin/dssinstall.sh
@@ -105,7 +105,7 @@ function changeCommonConf(){
sed -i "s#hostname:.*#hostname: $SERVER_IP#g" $CONF_APPLICATION_YML
sed -i "s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g" $CONF_DSS_PROPERTIES
sed -i "s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g" $CONF_DSS_PROPERTIES
- sed -i "s#wds.linkis.server.mybatis.datasource.password.*#***REMOVED***$MYSQL_PASSWORD#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g" $CONF_DSS_PROPERTIES
sed -i "s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g" $CONF_DSS_PROPERTIES
sed -i "s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g" $CONF_DSS_PROPERTIES
sed -i "s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT/#g" $CONF_DSS_PROPERTIES
diff --git a/assembly/bin/install.sh b/assembly/bin/install.sh
new file mode 100644
index 0000000000..bf49bea929
--- /dev/null
+++ b/assembly/bin/install.sh
@@ -0,0 +1,378 @@
+#!/bin/sh
+#Actively load user env
+if [ -f "$HOME/.bashrc" ];then
+ source "$HOME/.bashrc"
+else
+ echo "Warning! user bashrc file does not exist."
+fi
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
+SERVER_IP=""
+SERVER_HOME=""
+
+local_host="`hostname --fqdn`"
+LOCAL_IP=$(hostname -I)
+LOCAL_IP=${LOCAL_IP// /}
+
+#To be compatible with MacOS and Linux
+txt=""
+if [[ "$OSTYPE" == "darwin"* ]]; then
+ txt="''"
+elif [[ "$OSTYPE" == "linux-gnu" ]]; then
+ # linux
+ txt=""
+elif [[ "$OSTYPE" == "cygwin" ]]; then
+ echo "dss not support Windows operating system"
+ exit 1
+elif [[ "$OSTYPE" == "msys" ]]; then
+ echo "dss not support Windows operating system"
+ exit 1
+elif [[ "$OSTYPE" == "win32" ]]; then
+ echo "dss not support Windows operating system"
+ exit 1
+elif [[ "$OSTYPE" == "freebsd"* ]]; then
+ txt=""
+else
+ echo "Operating system unknown, please tell us(submit issue) for better service"
+ exit 1
+fi
+
+function isSuccess(){
+ if [ $? -ne 0 ]; then
+ echo "Failed to $1"
+ exit 1
+ else
+ echo "Succeed to $1"
+ fi
+}
+
+function checkJava(){
+ java -version
+ isSuccess "execute java --version"
+}
+
+checkJava
+
+dos2unix -q ${workDir}/config/*
+isSuccess "execute dos2unix -q ${workDir}/config/*"
+dos2unix -q ${workDir}/bin/*
+
+echo "step1:load config"
+source ${workDir}/config/config.sh
+source ${workDir}/config/db.sh
+
+DSS_FILE_PATH="$workDir/$DSS_FILE_NAME"
+#dos2unix ${DSS_FILE_PATH}/sbin/*
+#dos2unix ${DSS_FILE_PATH}/sbin/ext/*
+if [ -z $DSS_FILE_NAME ]; then
+ echo "DSS_FILE_NAME is null "
+ exit 1
+fi
+
+function replaceCommonIp() {
+ if [ -z "$DSS_FRAMEWORK_PROJECT_SERVER_INSTALL_IP" ]; then
+ DSS_FRAMEWORK_PROJECT_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_FRAMEWORK_PROJECT_SERVER_PORT" ]; then
+ DSS_FRAMEWORK_PROJECT_SERVER_PORT=9002
+ fi
+
+ if [ -z "$DSS_FRAMEWORK_ORCHESTRATOR_SERVER_INSTALL_IP" ]; then
+ DSS_FRAMEWORK_ORCHESTRATOR_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_FRAMEWORK_ORCHESTRATOR_SERVER_PORT" ]; then
+ DSS_FRAMEWORK_ORCHESTRATOR_SERVER_PORT=9003
+ fi
+
+ if [ -z "$DSS_APISERVICE_SERVER_INSTALL_IP" ]; then
+ DSS_APISERVICE_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_APISERVICE_SERVER_PORT" ]; then
+ DSS_APISERVICE_SERVER_PORT=9004
+ fi
+
+ if [ -z "$DSS_WORKFLOW_SERVER_INSTALL_IP" ]; then
+ DSS_WORKFLOW_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_WORKFLOW_SERVER_PORT" ]; then
+ DSS_WORKFLOW_SERVER_PORT=9005
+ fi
+
+ if [ -z "$DSS_FLOW_EXECUTION_SERVER_INSTALL_IP" ]; then
+ DSS_FLOW_EXECUTION_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_FLOW_EXECUTION_SERVER_PORT" ]; then
+ DSS_FLOW_EXECUTION_SERVER_PORT=9006
+ fi
+
+ if [ -z "$DSS_SCRIPTIS_SERVER_INSTALL_IP" ]; then
+ DSS_SCRIPTIS_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_SCRIPTIS_SERVER_PORT" ]; then
+ DSS_SCRIPTIS_SERVER_PORT=9008
+ fi
+
+ if [ -z "$DSS_DATA_API_SERVER_INSTALL_IP" ]; then
+ DSS_DATA_API_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_DATA_API_SERVER_PORT" ]; then
+ DSS_DATA_API_SERVER_PORT=9208
+ fi
+
+ if [ -z "$DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP" ]; then
+ DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_DATA_GOVERNANCE_SERVER_PORT" ]; then
+ DSS_DATA_GOVERNANCE_SERVER_PORT=9209
+ fi
+
+ if [ -z "$DSS_GUIDE_SERVER_INSTALL_IP" ]; then
+ DSS_GUIDE_SERVER_INSTALL_IP=$LOCAL_IP
+ fi
+ if [ -z "$DSS_GUIDE_SERVER_PORT" ]; then
+ DSS_GUIDE_SERVER_PORT=9210
+ fi
+
+ if [[ $GATEWAY_INSTALL_IP == "127.0.0.1" ]] || [ -z "$GATEWAY_INSTALL_IP" ]; then
+ #echo "GATEWAY_INSTALL_IP is equals $GATEWAY_INSTALL_IP ,we will change it to ip address"
+ GATEWAY_INSTALL_IP=$LOCAL_IP
+ fi
+ if [[ $EUREKA_INSTALL_IP == "127.0.0.1" ]] || [ -z "$EUREKA_INSTALL_IP" ]; then
+ #echo "EUREKA_INSTALL_IP is equals $EUREKA_INSTALL_IP ,we will change it to ip address"
+ EUREKA_INSTALL_IP=$LOCAL_IP
+ fi
+}
+##替换真实的IP
+replaceCommonIp
+
+EUREKA_URL=http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/
+
+## excecute sql
+source ${workDir}/bin/executeSQL.sh
+
+function changeCommonConf(){
+ sed -i "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $CONF_APPLICATION_YML
+ sed -i "s#hostname:.*#hostname: $SERVER_IP#g" $CONF_APPLICATION_YML
+ sed -i "s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT/#g" $CONF_DSS_PROPERTIES
+ sed -i "s#wds.linkis.gateway.wtss.url.*#wds.linkis.gateway.wtss.url=http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT/#g" $CONF_DSS_PROPERTIES
+}
+
+##function start
+function changeConf(){
+ sed -i "s#spring.server.port=.*#spring.server.port=$SERVER_PORT#g" $CONF_SERVER_PROPERTIES
+ if [[ $SERVER_NAME == "dss-framework-orchestrator-server" ]] || [[ $SERVER_NAME == "dss-workflow-server" ]]; then
+ SERVER_FULL_NAME=$SERVER_NAME
+ SERVER_FULL_NAME=$SERVER_NAME-$ENV_FLAG
+ sed -i "s#spring.spring.application.name=.*#spring.spring.application.name=$SERVER_FULL_NAME#g" $CONF_SERVER_PROPERTIES
+ fi
+ sed -i "s#wds.dss.appconn.scheduler.project.store.dir.*#wds.dss.appconn.scheduler.project.store.dir=$WDS_SCHEDULER_PATH#g" $CONF_SERVER_PROPERTIES
+ isSuccess "subsitution $CONF_SERVER_PROPERTIES"
+}
+##function end
+
+
+UPLOAD_PUBLIC_IPS=""
+##function start
+function uploadServiceFile(){
+ if [[ $SERVER_IP == "127.0.0.1" ]]; then
+ SERVER_IP=$local_host
+ fi
+ #echo "$SERVER_NAME-step3:copy install package"
+ # upload project conf
+ # cp -rfp $SSH_PORT ${workDir}/config/{$SERVER_NAME}.properties $CONF_PATH
+ if [[ $UPLOAD_PUBLIC_IPS == *",${ENV_FLAG}-$SERVER_IP,"* ]]; then
+ return 0
+ fi
+ cp -rfp ${DSS_FILE_PATH}/* $SERVER_HOME
+ cp -rfp ${workDir}/bin $SERVER_HOME
+ cp -rfp ${workDir}/config/* $SERVER_HOME/conf
+ sudo chown -R $deployUser:$deployUser $SERVER_HOME
+ UPLOAD_PUBLIC_IPS="$UPLOAD_PUBLIC_IPS,${ENV_FLAG}-$SERVER_IP,"
+ changeCommonConf
+# echo "UPLOAD_PUBLIC_IPS-->$UPLOAD_PUBLIC_IPS"
+}
+
+##function start
+function installPackage(){
+ if [[ $SERVER_IP == "127.0.0.1" ]]; then
+ SERVER_IP=$local_host
+ fi
+ if [ -z $SERVER_NAME ]; then
+ echo "ERROR:SERVER_NAME is null "
+ exit 1
+ fi
+ uploadServiceFile
+ # change configuration
+ changeConf
+}
+
+function dssWebInstall(){
+if ! test -e ${LINKIS_DSS_HOME}/wedatasphere-dss-web*.zip; then
+ echo "**********Error: please put wedatasphere-dss-web-xxx.zip in ${LINKIS_DSS_HOME}! "
+ exit 1
+else
+ echo "Start to unzip dss web package."
+ unzip -d ${LINKIS_DSS_HOME}/web/ -o ${LINKIS_DSS_HOME}/wedatasphere-dss-web-*.zip > /dev/null 2>&1
+ sed -i "s#linkis_url.*#linkis_url=${LINKIS_GATEWAY_URL}#g" ${LINKIS_DSS_HOME}/web/config.sh
+ isSuccess "Unzip dss web package to ${LINKIS_DSS_HOME}/web"
+fi
+}
+
+##Install dss projects
+function installDssProject() {
+ echo "step2:update config"
+# if [ "$DSS_INSTALL_HOME" != "" ]
+# then
+# rm -rf $DSS_INSTALL_HOME
+# fi
+ #echo ""
+ #echo "-----------------DSS install start--------------------"
+ SERVER_HOME=$DSS_INSTALL_HOME
+ if [ "$SERVER_HOME" == "" ]
+ then
+ export SERVER_HOME=${workDir}/DSSInstall
+ fi
+ if [ -d $SERVER_HOME ] && [ "$SERVER_HOME" != "$workDir" ]; then
+ rm -rf $SERVER_HOME-bak
+ echo "mv $SERVER_HOME $SERVER_HOME-bak"
+ mv $SERVER_HOME $SERVER_HOME-bak
+ fi
+ echo "create dir SERVER_HOME: $SERVER_HOME"
+ sudo mkdir -p $SERVER_HOME
+ isSuccess "Create the dir of $SERVER_HOME"
+ sudo chown -R $deployUser:$deployUser $SERVER_HOME
+ isSuccess "chown -R $deployUser:$deployUser $SERVER_HOME"
+
+ #echo ""
+ SERVER_NAME=dss-framework-project-server
+ SERVER_IP=$DSS_FRAMEWORK_PROJECT_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_FRAMEWORK_PROJECT_SERVER_PORT
+
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-framework/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-framework
+ LOG_PATH=$SERVER_HOME/logs/dss-framework/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ ###install project-Server
+ installPackage
+ #echo ""
+
+ SERVER_NAME=dss-framework-orchestrator-server
+ SERVER_IP=$DSS_FRAMEWORK_ORCHESTRATOR_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_FRAMEWORK_ORCHESTRATOR_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-framework/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-framework
+ LOG_PATH=$SERVER_HOME/logs/dss-framework/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ ###install orchestrator-Server
+ installPackage
+ #echo ""
+
+ SERVER_NAME=dss-apiservice-server
+ SERVER_IP=$DSS_APISERVICE_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_APISERVICE_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-apps/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-apps
+ LOG_PATH=$SERVER_HOME/logs/dss-apps/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ ###install dss-apiservice-server
+ installPackage
+ #echo ""
+
+ SERVER_NAME=dss-scriptis-server
+ SERVER_IP=$DSS_SCRIPTIS_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_SCRIPTIS_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-apps/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-apps
+ LOG_PATH=$SERVER_HOME/logs/dss-apps/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ ###install dss-scriptis-server
+ installPackage
+ #echo ""
+
+ ##Flow execution Install
+ PACKAGE_DIR=dss
+ SERVER_NAME=dss-flow-execution-server
+ SERVER_IP=$DSS_FLOW_EXECUTION_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_FLOW_EXECUTION_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-orchestrator/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-orchestrator
+ LOG_PATH=$SERVER_HOME/logs/dss-orchestrator/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ ###Install flow execution
+ installPackage
+ #echo ""
+
+ SERVER_NAME=dss-workflow-server
+ SERVER_IP=$DSS_WORKFLOW_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_WORKFLOW_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-orchestrator/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-orchestrator
+ LOG_PATH=$SERVER_HOME/logs/dss-orchestrator/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ ###install dss-workflow-server
+ installPackage
+
+ ###install dss-data-api-server
+ SERVER_NAME=dss-data-api-server
+ SERVER_IP=$DSS_DATA_API_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_DATA_API_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-data-api/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-data-api
+ LOG_PATH=$SERVER_HOME/logs/dss-data-api/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ installPackage
+
+ ###install dss-data-governance-server
+ SERVER_NAME=dss-data-governance-server
+ SERVER_IP=$DSS_DATA_GOVERNANCE_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_DATA_GOVERNANCE_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-data-governance/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-data-governance
+ LOG_PATH=$SERVER_HOME/logs/dss-data-governance/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ installPackage
+
+ ###install dss-guide-server
+ SERVER_NAME=dss-guide-server
+ SERVER_IP=$DSS_GUIDE_SERVER_INSTALL_IP
+ SERVER_PORT=$DSS_GUIDE_SERVER_PORT
+ UPLOAD_LIB_FILES=$DSS_FILE_PATH/lib/dss-guide/$SERVER_NAME
+ LIB_PATH=$SERVER_HOME/lib/dss-guide
+ LOG_PATH=$SERVER_HOME/logs/dss-guide/$SERVER_NAME
+ CONF_SERVER_PROPERTIES=$SERVER_HOME/conf/$SERVER_NAME.properties
+ CONF_DSS_PROPERTIES=$SERVER_HOME/conf/dss.properties
+ CONF_APPLICATION_YML=$SERVER_HOME/conf/application-dss.yml
+ installPackage
+
+ #echo "-----------------DSS install end--------------------"
+ #echo ""
+
+}
+ENV_FLAG="dev"
+installDssProject
+
+echo "Congratulations! You have installed DSS $DSS_VERSION successfully, please use sbin/dss-start-all.sh to start it!"
+
diff --git a/assembly/config/config.sh b/assembly/config/config.sh
index 76633b44b6..9c1c506bad 100644
--- a/assembly/config/config.sh
+++ b/assembly/config/config.sh
@@ -25,7 +25,7 @@ GATEWAY_INSTALL_IP=127.0.0.1
GATEWAY_PORT=9001
### Linkis BML Token
-BML_AUTH=
+BML_AUTH=BML-AUTH
################### The install Configuration of all Micro-Services start #####################
#
@@ -50,16 +50,16 @@ DSS_APPS_SERVER_PORT=9044
############## ############## dss_appconn_instance configuration start ############## ##############
####eventchecker表的地址,一般就是dss数据库
-EVENTCHECKER_JDBC_URL=jdbc:mysql://172.0.0.1:3305/linkis_kinghao?characterEncoding=UTF-8
+EVENTCHECKER_JDBC_URL=jdbc:mysql://172.16.16.16:3305/linkis_kinghao?characterEncoding=UTF-8
EVENTCHECKER_JDBC_USERNAME=linkis
EVENTCHECKER_JDBC_PASSWORD=linkis@Wds
#### hive地址
-DATACHECKER_JOB_JDBC_URL=jdbc:mysql://172.0.0.1:3306/hivemeta?useUnicode=true
+DATACHECKER_JOB_JDBC_URL=jdbc:mysql://172.16.16.10:3306/hivemeta?useUnicode=true
DATACHECKER_JOB_JDBC_USERNAME=hivemeta
DATACHECKER_JOB_JDBC_PASSWORD=Linkishivemeta@123
#### 元数据库,可配置成和DATACHECKER_JOB的一致
-DATACHECKER_BDP_JDBC_URL=jdbc:mysql://172.0.0.1:3306/hivemeta?useUnicode=true
+DATACHECKER_BDP_JDBC_URL=jdbc:mysql://172.16.16.10:3306/hivemeta?useUnicode=true
DATACHECKER_BDP_JDBC_USERNAME=hivemeta
DATACHECKER_BDP_JDBC_PASSWORD=Linkishivemeta@123
diff --git a/assembly/dss-package/pom.xml b/assembly/dss-package/pom.xml
index 842307e240..53f64fa10b 100644
--- a/assembly/dss-package/pom.xml
+++ b/assembly/dss-package/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
@@ -78,6 +78,10 @@
dss-sender-service
${dss.version}
+
+ javax.servlet-api
+ javax.servlet
+
@@ -103,10 +107,18 @@
linkis-hadoop-common
${linkis.version}
+
+ javax.servlet
+ javax.servlet-api
+
netty
io.netty
+
+ javax.servlet-api
+ javax.servlet
+
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 0e97116a4d..b19ac7a680 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -22,7 +22,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../pom.xml
pom
diff --git a/conf/application-dss.yml b/conf/application-dss.yml
new file mode 100644
index 0000000000..b0473838d8
--- /dev/null
+++ b/conf/application-dss.yml
@@ -0,0 +1,23 @@
+
+eureka:
+ client:
+ serviceUrl:
+ defaultZone: http://127.0.0.1:20303/eureka/
+ #instance:
+ #prefer-ip-address: true
+ #instance-id: ${spring.cloud.client.ip-address}:${server.port}
+ #metadata-map:
+ #test: wedatasphere
+
+management:
+ endpoints:
+ web:
+ exposure:
+ include: refresh,info
+logging:
+ config: classpath:log4j2.xml
+
+#mybatis:
+# configuration:
+# log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
+
diff --git a/conf/atlas-application.properties b/conf/atlas-application.properties
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/conf/dss-apiservice-server.properties b/conf/dss-apiservice-server.properties
new file mode 100644
index 0000000000..5162f42bcd
--- /dev/null
+++ b/conf/dss-apiservice-server.properties
@@ -0,0 +1,42 @@
+#
+# Copyright 2019 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+# Spring configurations
+spring.server.port=9206
+spring.spring.application.name=dss-apiservice-server
+
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/apiservice/core/dao/mapper/*.xml
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.apiservice.core.bo,com.webank.wedatasphere.dss.apiservice.core.vo
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.apiservice.core.dao
+
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.apiservice.core.restful
+
+#sit
+wds.linkis.server.version=v1
+wds.linkis.server.url=
+
+#test
+wds.linkis.test.mode=false
+wds.linkis.test.user=
+
+
+#dsm
+wds.linkis.server.dsm.admin.users=
+
+
+#用于执行的datasource配置
+wds.linkis.datasource.hikari.maximumPoolSize=100
+wds.linkis.datasource.hikari.minimumIdle=10
diff --git a/conf/dss-data-api-server.properties b/conf/dss-data-api-server.properties
new file mode 100644
index 0000000000..fe386097ed
--- /dev/null
+++ b/conf/dss-data-api-server.properties
@@ -0,0 +1,39 @@
+#
+# /*
+# * Copyright 2019 WeBank
+# *
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+#
+
+# Spring configurations
+spring.server.port=9208
+spring.spring.application.name=dss-data-api-server
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.data.api.server.restful
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/data/api/server/dao/impl/*.xml
+
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.data.api.server.entity
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.data.api.server.dao
+
+#wds.linkis.gateway.ip=127.0.0.1
+#wds.linkis.gateway.port=9001
+#wds.linkis.gateway.url=http://127.0.0.1:9001/
diff --git a/conf/dss-data-governance-server.properties b/conf/dss-data-governance-server.properties
new file mode 100644
index 0000000000..b3980c6ccc
--- /dev/null
+++ b/conf/dss-data-governance-server.properties
@@ -0,0 +1,53 @@
+#
+# /*
+# * Copyright 2019 WeBank
+# *
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+#
+
+# Spring configurations
+spring.server.port=9209
+spring.spring.application.name=dss-data-governance-server
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.data.asset.restful,com.webank.wedatasphere.dss.data.classification.restful
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/data/asset/dao/impl/*.xml
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.data.asset.entity
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.data.asset.dao,com.webank.wedatasphere.dss.data.warehouse.dao,com.webank.wedatasphere.dss.data.warehouse.mapper
+
+#wds.linkis.gateway.ip=127.0.0.1
+#wds.linkis.gateway.port=9001
+#wds.linkis.gateway.url=http://127.0.0.1:9001/
+
+
+# atlas config
+atlas.rest.address=http://xxxxxxx:21000
+atlas.username=xxxxxxxx
+atlas.password=yyyyyyyyy
+atlas.client.readTimeoutMSecs=60000
+atlas.client.connectTimeoutMSecs=60000
+
+atlas.cluster.name=primary
+
+# hive metadata config
+metastore.datasource.driver=com.mysql.jdbc.Driver
+metastore.datasource.url=jdbc:mysql://xxxxxx:yyyy/metastore?characterEncoding=UTF-8
+metastore.datasource.username=xxxxxx
+metastore.datasource.password=yyyyyy
\ No newline at end of file
diff --git a/conf/dss-flow-execution-server.properties b/conf/dss-flow-execution-server.properties
new file mode 100644
index 0000000000..1d8eb8d285
--- /dev/null
+++ b/conf/dss-flow-execution-server.properties
@@ -0,0 +1,55 @@
+#
+# Copyright 2019 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+# Spring configurations
+spring.server.port=9006
+
+spring.spring.application.name=dss-flow-entrance
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/flow/execution/entrance/dao/impl/*.xml,classpath*:org/apache/linkis/jobhistory/dao/impl/*.xml
+
+wds.linkis.server.mybatis.typeAliasesPackage=
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.flow.execution.entrance.dao,org.apache.linkis.jobhistory.dao
+
+
+wds.linkis.server.restful.scan.packages=org.apache.linkis.entrance.restful,com.webank.wedatasphere.dss.flow.execution.entrance.restful
+
+#wds.linkis.server.component.exclude.classes=org.apache.linkis.DataWorkCloudApplication
+
+wds.linkis.engine.application.name=flowExecutionEngine
+wds.linkis.enginemanager.application.name=flowExecution
+
+wds.linkis.query.application.name=linkis-ps-publicservice
+
+wds.linkis.console.config.application.name=linkis-ps-publicservice
+wds.linkis.engine.creation.wait.time.max=20m
+wds.linkis.server.version=v1
+
+wds.linkis.server.socket.mode=true
+
+wds.linkis.client.flow.adminuser=ws
+wds.linkis.client.flow.author.user.token=WS-AUTH
+
+wds.linkis.server.component.exclude.classes=org.apache.linkis.entranceclient.conf.ClientForEntranceSpringConfiguration,org.apache.linkis.entranceclient.conf.ClientSpringConfiguration
+
+wds.linkis.server.component.exclude.packages=org.apache.linkis.entrance.restful.
+spring.spring.main.allow-bean-definition-overriding=true
+
+wds.linkis.entrance.config.log.path=file:///appcom/tmp/dss/
+wds.linkis.spark.engine.version=2.4.3
+wds.linkis.hive.engine.version=2.3.3
diff --git a/conf/dss-framework-orchestrator-server.properties b/conf/dss-framework-orchestrator-server.properties
index b70b14792b..d6d0a1c4dd 100644
--- a/conf/dss-framework-orchestrator-server.properties
+++ b/conf/dss-framework-orchestrator-server.properties
@@ -32,7 +32,6 @@ wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.server.
wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.framework.appconn.dao,com.webank.wedatasphere.dss.orchestrator.core.dao,com.webank.wedatasphere.dss.server.dao,com.webank.wedatasphere.dss.application.dao,com.webank.wedatasphere.dss.workspace.mapper,com.webank.wedatasphere.dss.workspace.common.dao,com.webank.wedatasphere.dss.workspace.common.dao,com.webank.wedatasphere.dss.orchestrator.db.dao,com.webank.wedatasphere.dss.workflow.dao,com.webank.wedatasphere.dss.framework.appconn.dao,com.webank.wedatasphere.dss.flow.execution.entrance.dao
-wds.dss.server.scheduling.clear.cs.cron=0 0 3 * * ?
wds.dss.publish.max.remain.version=3
diff --git a/conf/dss-framework-project-server.properties b/conf/dss-framework-project-server.properties
new file mode 100644
index 0000000000..6b8086de83
--- /dev/null
+++ b/conf/dss-framework-project-server.properties
@@ -0,0 +1,37 @@
+#
+# Copyright 2019 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+# Spring configurations
+spring.server.port=9202
+spring.spring.application.name=dss-framework-project-server
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.framework.workspace.restful,com.webank.wedatasphere.dss.framework.project.restful,com.webank.wedatasphere.dss.framework.release.restful,com.webank.wedatasphere.dss.framework.appconn.restful,com.webank.wedatasphere.dss.framework.admin.restful
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/framework/workspace/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/application/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/project/dao/impl/*Mapper.xml,classpath*:com/webank/wedatasphere/dss/framework/appconn/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/release/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/admin/xml/impl/*.xml
+
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.application.entity,com.webank.wedatasphere.dss.common.entity,com.webank.wedatasphere.dss.framework.workspace.bean,com.webank.wedatasphere.dss.framework.project.entity,com.webank.wedatasphere.dss.framework.appconn.entity,com.webank.wedatasphere.dss.framework.release.entity,com.webank.wedatasphere.dss.framework.admin.pojo.entity
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.framework.workspace.dao,com.webank.wedatasphere.dss.application.dao,com.webank.wedatasphere.dss.framework.project.dao,com.webank.wedatasphere.dss.framework.appconn.dao,com.webank.wedatasphere.dss.framework.release.dao,com.webank.wedatasphere.dss.framework.admin.xml
+
+wds.dss.appconn.checker.development.ignore.list=workflow,sendemail
+wds.dss.appconn.checker.project.ignore.list=visualis
+
diff --git a/conf/dss-guide-server.properties b/conf/dss-guide-server.properties
new file mode 100644
index 0000000000..617935749d
--- /dev/null
+++ b/conf/dss-guide-server.properties
@@ -0,0 +1,55 @@
+#
+# /*
+# * Copyright 2019 WeBank
+# *
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+#
+
+# Spring configurations
+spring.server.port=9210
+spring.spring.application.name=dss-guide-server
+
+spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
+spring.jackson.time-zone=GMT+8
+
+wds.linkis.server.version=v1
+
+wds.linkis.log.clear=true
+
+
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.guide.server.restful
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/guide/server/dao/impl/*.xml
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.guide.server.entity
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.guide.server.dao
+
+
+## guide_images_path
+guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
+guide.chapter.images.path=/opt/dss/dss-guide-server/guide_images/
+
+#gitbook
+#The machine where the file exists
+target.ip.address=127.0.0.1
+#The file path of the machine where the file is stored
+host.gitbook.path=/appcom/Install/ApacheInstall/gitbook_books/_book
+#The path to copy the file to the current machine
+target.gitbook.path=/appcom/Install/ApacheInstall/gitbook_books
+#SUMMARY.md ignore absolve
+summary.ignore.model=km
+#sync model include: gitbook or database
+guide.sync.model=gitbook
diff --git a/conf/dss-scriptis-server.properties b/conf/dss-scriptis-server.properties
new file mode 100644
index 0000000000..5a37729566
--- /dev/null
+++ b/conf/dss-scriptis-server.properties
@@ -0,0 +1,39 @@
+#
+# Copyright 2019 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+# Spring configurations
+spring.server.port=9009
+spring.spring.application.name=dss-scriptis-server
+
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/scriptis/dao/mapper/*.xml
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.scriptis.vo
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.scriptis.dao
+
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.scriptis.restful
+
+#sit
+wds.linkis.server.version=v1
+wds.linkis.server.url=
+
+#test
+wds.linkis.test.mode=false
+wds.linkis.test.user=
+
+wds.dss.scriptis.global.limits.exportResEnable=false
+wds.dss.scriptis.global.limits.exportTableEnable=false
+wds.dss.scriptis.global.limits.downloadResEnable=false
+wds.dss.scriptis.global.limits.resCopyEnable=false
+wds.dss.scriptis.global.limits.proxyEnable=false
diff --git a/conf/dss-server.properties b/conf/dss-server.properties
index af19be3563..6aef7895bb 100644
--- a/conf/dss-server.properties
+++ b/conf/dss-server.properties
@@ -22,7 +22,7 @@ wds.dss.appconn.checker.development.ignore.list=orchestrator-framework,workflow,
wds.dss.appconn.checker.project.ignore.list=
## filter appconn
wds.dss.appconn.disabled=
-wds.dss.trust.token=***REMOVED***
+wds.dss.trust.token=FotUrJ9JmhQ=
##import file dir
wds.dss.server.scheduling.clear.cs.cron=0/5 * * * * ?
@@ -44,7 +44,7 @@ wds.linkis.resultSet.store.path=hdfs:///tmp/linkis
wds.linkis.server.socket.mode=true
wds.linkis.client.flow.adminuser=ws
-wds.linkis.client.flow.author.user.token=172.0.0.1
+wds.linkis.client.flow.author.user.token=WS-AUTH
wds.linkis.server.component.exclude.classes=org.apache.linkis.entranceclient.conf.ClientForEntranceSpringConfiguration,org.apache.linkis.entranceclient.conf.ClientSpringConfiguration
diff --git a/conf/dss-workflow-server.properties b/conf/dss-workflow-server.properties
new file mode 100644
index 0000000000..df4bd6dea5
--- /dev/null
+++ b/conf/dss-workflow-server.properties
@@ -0,0 +1,44 @@
+#
+# Copyright 2019 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+# Spring configurations
+spring.server.port=9207
+spring.spring.application.name=dss-workflow-server-dev
+
+wds.linkis.test.mode=false
+
+wds.linkis.test.user=
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.dss.workflow.restful
+
+##mybatis
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/workflow/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/framework/appconn/dao/impl/*.xml
+
+wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.workflow.entity,com.webank.wedatasphere.dss.framework.appconn.entity
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.workflow.dao,com.webank.wedatasphere.dss.framework.appconn.dao
+
+##import file dir
+wds.dss.file.upload.dir=/appcom/tmp/uploads
+
+wds.dss.server.export.env=DEV
+wds.dss.server.import.env=DEV
+
diff --git a/conf/dss.properties b/conf/dss.properties
index 674e0bf403..f61562fc8a 100644
--- a/conf/dss.properties
+++ b/conf/dss.properties
@@ -22,11 +22,11 @@ wds.linkis.gateway.wtss.url=http://127.0.0.1:9001/
wds.linkis.mysql.is.encrypt=false
wds.linkis.server.mybatis.datasource.url=
wds.linkis.server.mybatis.datasource.username=
-***REMOVED***
-wds.linkis.bml.auth.token.value=
-linkis.configuration.linkisclient.auth.token.value=
-wds.linkis.context.client.auth.value=
-wds.linkis.errorcode.auth.token=
+wds.linkis.server.mybatis.datasource.password=
+wds.linkis.bml.auth.token.value=BML-AUTH
+linkis.configuration.linkisclient.auth.token.value=BML-AUTH
+wds.linkis.context.client.auth.value=BML-AUTH
+wds.linkis.errorcode.auth.token=BML-AUTH
wds.dss.check.server.active.period=30
diff --git a/db/dss_ddl.sql b/db/dss_ddl.sql
index 1b52e2c607..551b4b59ab 100644
--- a/db/dss_ddl.sql
+++ b/db/dss_ddl.sql
@@ -51,6 +51,7 @@ CREATE TABLE `dss_orchestrator_info` (
`orchestrator_level` varchar(32) DEFAULT NULL COMMENT '工作流级别',
`update_user` varchar(100) DEFAULT NULL COMMENT '更新人',
`update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
+ `status` VARCHAR(64),
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `unique_idx_uuid` (`uuid`)
) ENGINE=InnoDB AUTO_INCREMENT=326 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT;
@@ -70,6 +71,7 @@ CREATE TABLE `dss_orchestrator_version_info` (
`content` varchar(255) DEFAULT NULL,
`context_id` varchar(200) DEFAULT NULL COMMENT '上下文ID',
`valid_flag` INT(1) DEFAULT '1' COMMENT '版本有效标示,0:无效;1:有效',
+ `commit_id` varchar(64),
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=422 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT;
@@ -116,6 +118,7 @@ CREATE TABLE `dss_project` (
`dev_process` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT '开发流程,多个以英文逗号分隔,取得的值是dss_workspace_dictionary中的dic_key(parent_key=p_develop_process)',
`orchestrator_mode` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT '编排模式,多个以英文逗号分隔,取得的值是dss_workspace_dictionary中的dic_key(parent_key=p_arrangement_mode或下面一级)',
`visible` tinyint(4) DEFAULT '1' COMMENT '0:已删除;1:未删除(默认)',
+ `associate_git` TINYINT DEFAULT '0' COMMENT '0:未接入git,1:已接入git',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=313 DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT;
@@ -639,3 +642,33 @@ key `idx_limit_name` (`limit_name`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='dss用户限制表';
+DROP TABLE IF EXISTS `dss_workspace_associate_git`;
+CREATE TABLE `dss_workspace_associate_git` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `workspace_id` bigint(20) DEFAULT NULL,
+ `git_user` varchar(64) DEFAULT NULL COMMENT 'git登录用户名',
+ `git_password` VARCHAR(255) DEFAULT NULL COMMENT 'git登录密码,用于跳转',
+ `git_token` varchar(255) COMMENT '用户配置的git token',
+ `git_url` varchar(255),
+ `create_time` datetime DEFAULT NULL,
+ `update_time` datetime DEFAULT NULL,
+ `create_by` varchar(128) DEFAULT NULL,
+ `update_by` varchar(128) DEFAULT NULL,
+ `type` varchar(32) DEFAULT NULL,
+ `git_user_id` varchar(32) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='工作空间绑定的git信息';
+
+
+DROP TABLE IF EXISTS `dss_orchestrator_submit_job_info`;
+CREATE TABLE `dss_orchestrator_submit_job_info` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
+ `orchestrator_id` bigint(20) NOT NULL,
+ `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
+ `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间',
+ `instance_name` varchar(128) DEFAULT NULL COMMENT '提交任务的实例',
+ `status` varchar(128) DEFAULT NULL COMMENT '提交任务状态',
+ `error_msg` varchar(2048) DEFAULT NULL COMMENT '提交任务异常信息',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='dss_orchestrator_submit_job_info表';
+
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/pom.xml b/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
index 51cf70eaa5..7b2ecf1590 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
@@ -54,7 +54,7 @@
com.webank.wedatasphere.dss
dss-origin-sso-integration-standard
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
org.apache.linkis
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java
index 61ff244b5e..e6aa66b835 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java
+++ b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java
@@ -28,12 +28,27 @@
public class DataChecker {
public final static String SOURCE_TYPE = "source.type";
public final static String DATA_OBJECT = "check.object";
+ /**
+ * 检查对象在节点中的序号
+ */
+ public final static String DATA_OBJECT_NUM = "check.sn.object.num";
public final static String WAIT_TIME = "max.check.hours";
public final static String QUERY_FREQUENCY = "query.frequency";
public final static String TIME_SCAPE = "time.scape";
public final static String MASK_URL = "bdp.mask.url";
public final static String MASK_APP_ID = "bdp.mask.app.id";
public final static String MASK_APP_TOKEN = "bdp.mask.app.token";
+ public final static String CONTEXTID_USER = "contextId.user";
+ public final static String CONTEXTID_PROJECT_NAME = "contextId.projectName";
+ public final static String CONTEXTID_FLOW_NAME = "contextId.flowName";
+ public final static String NAME_NAME = "nodeName";
+
+ public final static String QUALITIS_CHECK = "qualitis.check";
+ public final static String QUALITIS_SWITCH = "job.eventchecker.qualitis.switch";
+ public final static String QUALITIS_CHECK_DEFAULT = "qualitis.check.default";
+ public final static String EXPAND_SECOND_PARTITION = "hourly.secondary.partition";
+
+
private Properties p;
private static final Logger logger = LoggerFactory.getLogger(DataChecker.class);;
@@ -48,7 +63,7 @@ public DataChecker(Properties p, DataCheckerExecutionAction action) {
maxWaitTime = Long.valueOf(p.getProperty(DataChecker.WAIT_TIME, "1")) * 3600 * 1000;
//test over time
// maxWaitTime = Long.valueOf(p.getProperty(DataChecker.WAIT_TIME, "1")) * 120 * 1000;
- queryFrequency = Integer.valueOf(p.getProperty(DataChecker.QUERY_FREQUENCY, "30000"));
+ queryFrequency = Integer.valueOf(p.getProperty(DataChecker.QUERY_FREQUENCY, "60000"));
}
@@ -66,23 +81,23 @@ public void run() {
}
}catch (Exception ex){
dataCheckerAction.setState(RefExecutionState.Failed);
- throw new RuntimeException("get DataChecker result failed", ex);
+ throw ex;
}
}
- public void begineCheck(RefExecutionAction action){
+ public void begineCheck(DataCheckerExecutionAction action){
boolean success=false;
try {
success= wbDao.validateTableStatusFunction(p, logger,action);
}catch (Exception ex){
dataCheckerAction.setState(RefExecutionState.Failed);
logger.error("datacheck error",ex);
- throw new RuntimeException("get DataChecker result failed", ex);
+ throw ex;
}
if(success) {
dataCheckerAction.setState(RefExecutionState.Success);
- }else {
+ } else if (dataCheckerAction.getState() != RefExecutionState.Failed) {
dataCheckerAction.setState(RefExecutionState.Running);
}
}
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/common/CheckDataObject.java b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/common/CheckDataObject.java
index 60223ca56b..66e44eb220 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/common/CheckDataObject.java
+++ b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/common/CheckDataObject.java
@@ -75,6 +75,20 @@ public enum Type{
TABLE,
}
+ public String forMat(String secondPartitionExpand){
+ final StringBuilder sb = new StringBuilder();
+ sb.append(dbName)
+ .append(".")
+ .append(tableName);
+ if(type==Type.PARTITION){
+ sb.append("{").append(partitionName);
+ if(secondPartitionExpand!=null){
+ sb.append("/").append(secondPartitionExpand);
+ }
+ sb.append("}");
+ }
+ return sb.toString();
+ }
@Override
public String toString() {
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java
index e3bc2ae270..924fc8504b 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java
+++ b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java
@@ -24,8 +24,7 @@
import com.webank.wedatasphere.dss.appconn.datachecker.common.MaskCheckNotExistException;
import com.webank.wedatasphere.dss.appconn.datachecker.utils.HttpUtils;
import com.webank.wedatasphere.dss.appconn.datachecker.utils.QualitisUtil;
-import com.webank.wedatasphere.dss.standard.app.development.listener.common.RefExecutionAction;
-import com.webank.wedatasphere.dss.standard.app.development.listener.common.RefExecutionState;
+import com.webank.wedatasphere.dss.common.exception.DSSRuntimeException;
import okhttp3.FormBody;
import okhttp3.RequestBody;
import okhttp3.Response;
@@ -54,13 +53,6 @@ public class DataCheckerDao {
private static final String SQL_SOURCE_TYPE_JOB_PARTITION =
"SELECT * FROM DBS d JOIN TBLS t ON t.DB_ID = d.DB_ID JOIN PARTITIONS p ON p.TBL_ID = t.TBL_ID WHERE d.NAME=? AND t.TBL_NAME=? AND p.PART_NAME=?";
- private static final String SQL_SOURCE_TYPE_BDP =
- "SELECT * FROM desktop_bdapimport WHERE bdap_db_name = ? AND bdap_table_name = ? AND target_partition_name = ? AND status = '1';";
-
- private static final String SQL_SOURCE_TYPE_BDP_WITH_TIME_CONDITION =
- "SELECT * FROM desktop_bdapimport WHERE bdap_db_name = ? AND bdap_table_name = ? AND target_partition_name = ? " +
- "AND (UNIX_TIMESTAMP() - UNIX_TIMESTAMP(STR_TO_DATE(modify_time, '%Y-%m-%d %H:%i:%s'))) <= ? AND status = '1';";
-
private static final String SQL_DOPS_CHECK_TABLE =
"SELECT * FROM dops_clean_task_list WHERE db_name = ? AND tb_name = ? AND part_name is null AND task_state NOT IN (10,13) order by order_id desc limit 1";
private static final String SQL_DOPS_CHECK_PARTITION =
@@ -72,7 +64,6 @@ public class DataCheckerDao {
private static final String MASK_SOURCE_TYPE = "maskdb";
private static DataSource jobDS;
- private static DataSource bdpDS;
private static DataSource dopsDS;
private static volatile DataCheckerDao instance;
@@ -96,13 +87,6 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
return false;
}
}
- if (bdpDS == null) {
- bdpDS = DataDruidFactory.getBDPInstance(props, log);
- if (bdpDS == null) {
- log.warn("Error getting job Druid DataSource instance");
- return false;
- }
- }
boolean systemCheck = Boolean.valueOf(props.getProperty(DataChecker.QUALITIS_SWITCH));
if (systemCheck && dopsDS == null) {
dopsDS = DataDruidFactory.getDopsInstance(props, log);//通过alibaba的druid数据库连接池获取JOB数据库连接
@@ -122,7 +106,7 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
}
log.info("(DataChecker info) database table partition info : " + dataCheckerInfo);
long waitTime = Long.valueOf(props.getProperty(DataChecker.WAIT_TIME, "1")) * 3600 * 1000;
- int queryFrequency = Integer.valueOf(props.getProperty(DataChecker.QUERY_FREQUENCY, "30000"));
+ int queryFrequency = Integer.valueOf(props.getProperty(DataChecker.QUERY_FREQUENCY, "60000"));
// String timeScape = props.getProperty(DataChecker.TIME_SCAPE, "NULL");
log.info("(DataChecker info) wait time : " + waitTime);
log.info("(DataChecker info) query frequency : " + queryFrequency);
@@ -134,13 +118,12 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
});
QualitisUtil qualitisUtil = new QualitisUtil(props);
try (Connection jobConn = jobDS.getConnection();
- Connection bdpConn = bdpDS.getConnection();
Connection dopsConn = dopsDS != null ? dopsDS.getConnection() : null) {
List allCheckRes = dataObjectList
.parallelStream()
.map(proObjectMap -> {
log.info("Begin to Check dataObject:" + proObjectMap.entrySet().toString());
- boolean checkRes = getDataCheckResult(proObjectMap, jobConn, bdpConn, dopsConn, props, log,action,qualitisUtil);
+ boolean checkRes = getDataCheckResult(proObjectMap, jobConn, dopsConn, props, log,action,qualitisUtil);
if (null != action.getExecutionRequestRefContext()) {
if (checkRes) {
action.getExecutionRequestRefContext().appendLog("Database table partition info : " + proObjectMap.get(DataChecker.DATA_OBJECT) + " has arrived");
@@ -178,7 +161,6 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
private boolean getDataCheckResult(Map proObjectMap,
Connection jobConn,
- Connection bdpConn,
Connection dopsConn,
Properties props,
Logger log,
@@ -190,12 +172,9 @@ private boolean getDataCheckResult(Map proObjectMap,
}
String objectNum = proObjectMap.get(DataChecker.DATA_OBJECT_NUM);
CheckDataObject dataObject;
- try {
+
dataObject = parseDataObject(dataObjectStr);
- } catch (SQLException e) {
- log.error("parse dataObject failed", e);
- return false;
- }
+
Predicate
@@ -81,7 +80,7 @@
com.google.guava
guava
- 28.2-android
+ 33.1.0-jre
diff --git a/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/scala/com/webank/wedatasphere/dss/appconn/schedulis/conf/SchedulisConf.scala b/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/scala/com/webank/wedatasphere/dss/appconn/schedulis/conf/SchedulisConf.scala
new file mode 100644
index 0000000000..42a6b4cfcc
--- /dev/null
+++ b/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/scala/com/webank/wedatasphere/dss/appconn/schedulis/conf/SchedulisConf.scala
@@ -0,0 +1,32 @@
+ /*
+ *
+ * * Copyright 2019 WeBank
+ * *
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.appconn.schedulis.conf
+
+import java.lang
+import java.lang.reflect.Type
+
+import com.google.gson.{Gson, GsonBuilder, JsonElement, JsonPrimitive, JsonSerializationContext, JsonSerializer}
+
+object SchedulisConf {
+ implicit val gson:Gson = new GsonBuilder().setPrettyPrinting().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").serializeNulls
+ .registerTypeAdapter(classOf[java.lang.Double], new JsonSerializer[java.lang.Double] {
+ override def serialize(t: lang.Double, `type`: Type, jsonSerializationContext: JsonSerializationContext): JsonElement =
+ if(t == t.longValue()) new JsonPrimitive(t.longValue()) else new JsonPrimitive(t)
+ }).create
+}
diff --git a/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/scala/com/webank/wedatasphere/dss/appconn/schedulis/http/SchedulisHttpAction.scala b/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/scala/com/webank/wedatasphere/dss/appconn/schedulis/http/SchedulisHttpAction.scala
new file mode 100644
index 0000000000..8c1543dd63
--- /dev/null
+++ b/dss-appconn/appconns/dss-dolphinscheduler-appconn/src/main/scala/com/webank/wedatasphere/dss/appconn/schedulis/http/SchedulisHttpAction.scala
@@ -0,0 +1,87 @@
+ /*
+ *
+ * * Copyright 2019 WeBank
+ * *
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.appconn.schedulis.http
+
+import java.io.{File, InputStream}
+import java.util
+
+import com.webank.wedatasphere.dss.appconn.schedulis.conf.SchedulisConf
+import org.apache.linkis.httpclient.request.{GetAction, HttpAction, POSTAction, UploadAction, UserAction}
+
+trait SchedulisHttpAction extends UserAction{
+
+ private var user:String = _
+
+ override def setUser(user: String): Unit = this.user = user
+
+ override def getUser: String = this.user
+
+}
+
+abstract class SchedulisGetAction extends GetAction with SchedulisHttpAction
+
+
+abstract class ScheudlisPostAction extends POSTAction with SchedulisHttpAction{
+
+ override def getRequestPayload: String = SchedulisConf.gson.toJson(getRequestPayloads)
+
+}
+
+
+
+
+case class SchedulisUploadAction(filePaths:Array[String],
+ _inputStreams:util.Map[String,InputStream],uploadUrl:String) extends ScheudlisPostAction with UploadAction with SchedulisHttpAction{
+
+ private val streamNames = new util.HashMap[String,String]
+
+ override val files: util.Map[String, String] = {
+ if (null == filePaths || filePaths.length == 0) new util.HashMap[String,String]() else{
+ val map = new java.util.HashMap[String, String]
+ filePaths foreach {
+ filePath => val arr = filePath.split(File.separator)
+ val fileName = arr(arr.length - 1)
+ map.put("file", filePath)
+ }
+ map
+ }
+ }
+
+ override def inputStreams: util.Map[String, InputStream] = _inputStreams
+
+ override def inputStreamNames: util.Map[String, String] = streamNames
+
+ private var _user:String = _
+
+ override def setUser(user: String): Unit = this._user = user
+
+ override def getUser: String = this._user
+
+ override def getRequestPayload: String = ""
+
+ override def getURL: String = uploadUrl
+}
+
+class SchedulisCreateProjectAction(url:String) extends ScheudlisPostAction{
+
+ override def getURL: String = url
+
+}
+
+
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml b/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
index 7539d9e681..01d05f70d8 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
@@ -61,7 +61,22 @@
dss-development-process-standard-execution
${dss.version}
-
+
+ com.squareup.okhttp3
+ okhttp
+ 4.2.2
+
+
+ org.jetbrains
+ annotations
+
+
+
+
+ cn.hutool
+ hutool-all
+ 5.8.21
+
org.apache.commons
commons-lang3
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java
index 1854973ae2..fcec4ceca7 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java
@@ -47,6 +47,7 @@ public class EventChecker implements Runnable{
public final static String TODAY="only.receive.today";
public final static String USE_RUN_DATE ="msg.receive.use.rundate";
public final static String AFTERSEND="msg.after.send";
+ public final static String CHANNEL_TYPE="msg.channel.type";
private Properties p;
private String jobId;
@@ -67,10 +68,10 @@ public EventChecker(Properties p, EventCheckerExecutionAction action) {
String waitTime = p.getProperty(EventChecker.WAIT_TIME, "1");
Double doubleWaitTime = Double.valueOf(waitTime) * 3600 * 1000;
maxWaitTime = Long.valueOf(doubleWaitTime.longValue());
- String query_frequency = p.getProperty(EventChecker.QUERY_FREQUENCY, "30000");
+ String query_frequency = p.getProperty(EventChecker.QUERY_FREQUENCY, "60000");
queryFrequency = Integer.valueOf(query_frequency);
- if(queryFrequency <10000){
- queryFrequency = 10000;
+ if(queryFrequency <60000){
+ queryFrequency = 60000;
}
}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgReceiveRequest.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgReceiveRequest.java
new file mode 100644
index 0000000000..c540cc64bd
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgReceiveRequest.java
@@ -0,0 +1,70 @@
+package com.webank.wedatasphere.dss.appconn.eventchecker.entity;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Author: xlinliu
+ * Date: 2024/8/2
+ */
+public class HttpMsgReceiveRequest {
+ private String receiver;
+ private String topic;
+ private String msgName;
+ private String runDate;
+ private boolean onlyReceiveToday;
+ private boolean receiveUseRunDate;
+
+
+ public HttpMsgReceiveRequest(String receiver, String topic, String msgName, String runDate, boolean onlyReceiveToday, boolean receiveUseRunDate) {
+ this.receiver = receiver;
+ this.topic = topic;
+ this.msgName = msgName;
+ this.runDate = runDate;
+ this.onlyReceiveToday = onlyReceiveToday;
+ this.receiveUseRunDate = receiveUseRunDate;
+ }
+
+ public HttpMsgReceiveRequest() {
+
+ }
+
+ public HttpMsgReceiveRequest setReceiver(String receiver) {
+ this.receiver = receiver;
+ return this;
+ }
+
+ public HttpMsgReceiveRequest setTopic(String topic) {
+ this.topic = topic;
+ return this;
+ }
+
+ public HttpMsgReceiveRequest setMsgName(String msgName) {
+ this.msgName = msgName;
+ return this;
+ }
+
+ public HttpMsgReceiveRequest setRunDate(String runDate) {
+ this.runDate = runDate;
+ return this;
+ }
+
+ public HttpMsgReceiveRequest setOnlyReceiveToday(boolean onlyReceiveToday) {
+ this.onlyReceiveToday = onlyReceiveToday;
+ return this;
+ }
+
+ public HttpMsgReceiveRequest setReceiveUseRunDate(boolean receiveUseRunDate) {
+ this.receiveUseRunDate = receiveUseRunDate;
+ return this;
+ }
+
+
+ public String toJson() {
+ Gson gson = new GsonBuilder().create();
+ return gson.toJson(this);
+ }
+}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgReceiveResponse.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgReceiveResponse.java
new file mode 100644
index 0000000000..6ff0105051
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgReceiveResponse.java
@@ -0,0 +1,60 @@
+package com.webank.wedatasphere.dss.appconn.eventchecker.entity;
+
+import java.util.Map;
+
+/**
+ * Author: xlinliu
+ * Date: 2024/8/6
+ */
+public class HttpMsgReceiveResponse {
+ private int retCode;
+ private String message;
+ private long msgId;
+ private Map msgBody;
+ /**
+ * success成功
+ * fail失败
+ * running运行中
+ */
+ private String status;
+
+ public int getRetCode() {
+ return retCode;
+ }
+
+ public void setRetCode(int retCode) {
+ this.retCode = retCode;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+
+ public long getMsgId() {
+ return msgId;
+ }
+
+ public void setMsgId(long msgId) {
+ this.msgId = msgId;
+ }
+
+ public Map getMsgBody() {
+ return msgBody;
+ }
+
+ public void setMsgBody(Map msgBody) {
+ this.msgBody = msgBody;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ public void setStatus(String status) {
+ this.status = status;
+ }
+}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgSendRequest.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgSendRequest.java
new file mode 100644
index 0000000000..9240d2fdb2
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgSendRequest.java
@@ -0,0 +1,69 @@
+package com.webank.wedatasphere.dss.appconn.eventchecker.entity;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Author: xlinliu
+ * Date: 2024/8/2
+ */
+public class HttpMsgSendRequest {
+ private String sender;
+ private String topic;
+ private String msgName;
+ private String runDate;
+ private String msgId;
+ private Map msgBody;
+
+ public HttpMsgSendRequest(String sender, String topic, String msgName, String runDate, String msgId, Map msgBody) {
+ this.sender = sender;
+ this.topic = topic;
+ this.msgName = msgName;
+ this.runDate = runDate;
+ this.msgId = msgId;
+ this.msgBody = msgBody;
+ }
+
+ public HttpMsgSendRequest() {
+ this.msgBody = new HashMap<>();
+ }
+
+ public HttpMsgSendRequest setSender(String sender) {
+ this.sender = sender;
+ return this;
+ }
+
+ public HttpMsgSendRequest setTopic(String topic) {
+ this.topic = topic;
+ return this;
+ }
+
+ public HttpMsgSendRequest setMsgName(String msgName) {
+ this.msgName = msgName;
+ return this;
+ }
+
+ public HttpMsgSendRequest setRunDate(String runDate) {
+ this.runDate = runDate;
+ return this;
+ }
+
+ public HttpMsgSendRequest setMsgId(String msgId) {
+ this.msgId = msgId;
+ return this;
+ }
+
+
+ public HttpMsgSendRequest addMsgBodyField(String key, Object value) {
+ this.msgBody.put(key, value);
+ return this;
+ }
+
+ public String toJson() {
+ Gson gson = new GsonBuilder().create();
+ return gson.toJson(this);
+ }
+}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgSendResponse.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgSendResponse.java
new file mode 100644
index 0000000000..0ab97825a0
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/HttpMsgSendResponse.java
@@ -0,0 +1,26 @@
+package com.webank.wedatasphere.dss.appconn.eventchecker.entity;
+
+/**
+ * Author: xlinliu
+ * Date: 2024/8/6
+ */
+public class HttpMsgSendResponse {
+ private int retCode;
+ private String message;
+
+ public int getRetCode() {
+ return retCode;
+ }
+
+ public void setRetCode(int retCode) {
+ this.retCode = retCode;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java
index 23ed8576fc..8d51d4cf1e 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java
@@ -70,7 +70,7 @@ void initECParams(Properties props){
runDate = props.getProperty("run_date");
userTime = props.getProperty(EventChecker.USER_TIME);
waitTime = props.getProperty(EventChecker.WAIT_TIME, "1");
- query_frequency = props.getProperty(EventChecker.QUERY_FREQUENCY, "30000");
+ query_frequency = props.getProperty(EventChecker.QUERY_FREQUENCY, "60000");
afterSend = props.getProperty(EventChecker.AFTERSEND);
}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheckReceiver.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheckReceiver.java
index 853eb381c1..f20095ea0a 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheckReceiver.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheckReceiver.java
@@ -26,10 +26,29 @@
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.util.Arrays;
import java.util.Date;
import java.util.Properties;
-public class AbstractEventCheckReceiver extends AbstractEventCheck{
+public abstract class AbstractEventCheckReceiver extends AbstractEventCheck{
+ String todayStartTime;
+ String todayEndTime;
+ String allStartTime;
+ String allEndTime;
+ String nowStartTime;
+
+ public AbstractEventCheckReceiver(Properties props) {
+ initECParams(props);
+ initReceiverTimes();
+ }
+
+ void initReceiverTimes(){
+ todayStartTime = DateFormatUtils.format(new Date(), "yyyy-MM-dd 00:00:00");
+ todayEndTime = DateFormatUtils.format(new Date(), "yyyy-MM-dd 23:59:59");
+ allStartTime = DateFormatUtils.format(new Date(), "10000-01-01 00:00:00");
+ allEndTime = DateFormatUtils.format(new Date(), "9999-12-31 23:59:59");
+ nowStartTime = DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss");
+ }
/**
* Fill the result into the source
*/
@@ -39,14 +58,14 @@ String setConsumedMsg(Properties props, Logger log, String[] consumedMsgInfo){
if(consumedMsgInfo!=null && consumedMsgInfo.length == 4){
vNewMsgID = consumedMsgInfo[0];
String vMsgName = consumedMsgInfo[1];
- String vSender = consumedMsgInfo[2];
+ String vReceiver = consumedMsgInfo[2];
String vMsg = consumedMsgInfo[3];
if (null == vMsg) {
props.put(EventChecker.MSG, "NULL");
} else {
props.put(EventChecker.MSG, vMsg);
}
- log.info("Received message : messageID: " + vNewMsgID + ", messageName: " + vMsgName + ", receiver: " + vSender
+ log.info("Received message : messageID: " + vNewMsgID + ", messageName: " + vMsgName + ", receiver: " + vReceiver
+ ", messageBody: " + vMsg);
}
}catch (Exception e) {
@@ -114,8 +133,7 @@ String getOffset(int jobId, Properties props, Logger log){
String lastMsgId = "0";
try {
msgConn = getEventCheckerConnection(props,log);
- pstmtForGetID = msgConn.prepareStatement(sqlForReadMsgID, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
-// pstmtForGetID = msgConn.prepareCall(sqlForReadMsgID);
+ pstmtForGetID = msgConn.prepareCall(sqlForReadMsgID);
pstmtForGetID.setString(1, receiver);
pstmtForGetID.setString(2, topic);
pstmtForGetID.setString(3, msgName);
@@ -134,71 +152,54 @@ String getOffset(int jobId, Properties props, Logger log){
log.info("The last record id was " + lastMsgId);
return lastMsgId;
}
-
- /**
- * Consistent entrance to consumer message
- */
- String[] getMsg(Properties props, Logger log,String ... params){
- boolean useRunDate=Boolean.parseBoolean(params[3]);
- String sqlForReadTMsg;
- if(useRunDate){
- sqlForReadTMsg ="SELECT * FROM event_queue WHERE topic=? AND msg_name=? AND send_time >=? AND send_time <=? AND msg_id >? AND run_date =?ORDER BY msg_id ASC LIMIT 1";
- } else{
- sqlForReadTMsg="SELECT * FROM event_queue WHERE topic=? AND msg_name=? AND send_time >=? AND send_time <=? AND msg_id >? ORDER BY msg_id ASC LIMIT 1";
+ @Override
+ public boolean reciveMsg(int jobId, Properties props, Logger log) {
+ boolean result = false;
+ try{
+ String lastMsgId = getOffset(jobId,props,log);
+ String[] executeType = createExecuteType(jobId,props,log,lastMsgId);
+            if(executeType!=null && executeType.length ==5){
+                log.info("event receiver executeType[]:{},{},{},{},{}",executeType[0],executeType[1],executeType[2],executeType[3],executeType[4]);
+ String[] consumedMsgInfo = getMsg(jobId, props, log,executeType);
+ if(consumedMsgInfo!=null && consumedMsgInfo.length == 4){
+ result = updateMsgOffset(jobId,props,log,consumedMsgInfo,lastMsgId);
+ }
+ }else{
+ log.error("executeType error {} " , Arrays.toString(executeType));
+ return result;
+ }
+ }catch (Exception e){
+ log.error("EventChecker failed to receive the message" , e);
+ throw e;
}
+ return result;
+ }
- PreparedStatement pstmt = null;
- Connection msgConn = null;
- ResultSet rs = null;
- String[] consumedMsgInfo = null;
- try {
- msgConn = getEventCheckerConnection(props,log);
- pstmt = msgConn.prepareStatement(sqlForReadTMsg, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
-// pstmt = msgConn.prepareCall(sqlForReadTMsg);
- pstmt.setString(1, topic);
- pstmt.setString(2, msgName);
- pstmt.setString(3, params[0]);
- pstmt.setString(4, params[1]);
- pstmt.setString(5, params[2]);
- if(useRunDate){
- log.info("use run_date, run_date:{}", params[4]);
- pstmt.setString(6,params[4]);
- }
- log.info("param {} StartTime: " + params[0] + ", EndTime: " + params[1]
- + ", Topic: " + topic + ", MessageName: " + msgName + ", LastMessageID: " + params[2]);
- rs = pstmt.executeQuery();
- while (rs.next()) {
- consumedMsgInfo = new String[4];
- String[] msgKey = new String[]{"msg_id", "msg_name", "sender", "msg"};
- for (int i = 0; i < msgKey.length; i++) {
- try {
- consumedMsgInfo[i] = rs.getString(msgKey[i]);
- } catch (SQLException e) {
- throw new RuntimeException("Error while reading data from ResultSet", e);
- }
+ private String[] createExecuteType(int jobId, Properties props, Logger log,String lastMsgId){
+ boolean receiveTodayFlag = (null != receiveToday && "true".equals(receiveToday.trim().toLowerCase()));
+ boolean afterSendFlag = (null != afterSend && "true".equals(afterSend.trim().toLowerCase()));
+ //只有receiveTodayFlag为true时,useRunDateFlag才有意义。
+ Boolean useRunDateFlag = receiveTodayFlag && (null == useRunDate || "true".equalsIgnoreCase(useRunDate.trim()));
+ String[] executeType = null;
+ try {
+ if (receiveTodayFlag && !useRunDateFlag) {
+ if (afterSendFlag) {
+ executeType = new String[]{nowStartTime, todayEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
+ } else {
+ executeType = new String[]{todayStartTime, todayEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
+ }
+ } else {
+ if (afterSendFlag) {
+ executeType = new String[]{nowStartTime, allEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
+ } else {
+ executeType = new String[]{allStartTime, allEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
}
}
-
-// if(rs.last()){
-// consumedMsgInfo = new String[4];
-// String[] msgKey = new String[]{"msg_id","msg_name","sender","msg"};
-// for (int i = 0;i <= 3;i++) {
-// consumedMsgInfo[i] = rs.getString(msgKey[i]);
-// }
-// }
- } catch (SQLException e) {
- throw new RuntimeException("EventChecker failed to receive message" + e);
- } finally {
- closeQueryStmt(pstmt, log);
- closeConnection(msgConn, log);
- closeQueryRef(rs, log);
+ }catch(Exception e){
+            log.error("create executeType failed", e);
}
- return consumedMsgInfo;
- }
-
- @Override
- public boolean reciveMsg(int jobId, Properties props, Logger log) {
- return super.reciveMsg(jobId, props, log);
+ return executeType;
}
+ public abstract String[] getMsg(int jobId,Properties props, Logger log,String ... params) ;
}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/DefaultEventcheckReceiver.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/DefaultEventcheckReceiver.java
index 7689a997fc..4dd895ed4c 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/DefaultEventcheckReceiver.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/DefaultEventcheckReceiver.java
@@ -18,113 +18,73 @@
-import org.apache.commons.lang3.time.DateFormatUtils;
-import org.slf4j.Logger;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Date;
+import org.slf4j.Logger;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
import java.util.Properties;
public class DefaultEventcheckReceiver extends AbstractEventCheckReceiver {
- String todayStartTime;
- String todayEndTime;
- String allStartTime;
- String allEndTime;
- String nowStartTime;
public DefaultEventcheckReceiver(Properties props) {
- initECParams(props);
- initReceiverTimes();
- }
-
- private void initReceiverTimes(){
- todayStartTime = DateFormatUtils.format(new Date(), "yyyy-MM-dd 00:00:00");
- todayEndTime = DateFormatUtils.format(new Date(), "yyyy-MM-dd 23:59:59");
- allStartTime = DateFormatUtils.format(new Date(), "10000-01-01 00:00:00");
- allEndTime = DateFormatUtils.format(new Date(), "9999-12-31 23:59:59");
- nowStartTime = DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss");
+ super(props);
}
-
+ /**
+ * Consistent entrance to consumer message
+ */
@Override
- public boolean reciveMsg(int jobId, Properties props, Logger log) {
- boolean result = false;
- try{
- String lastMsgId = getOffset(jobId,props,log);
- String[] executeType = createExecuteType(jobId,props,log,lastMsgId);
- log.info("event receiver executeType[]:{},{},{},{},{}",executeType[0],executeType[1],executeType[2],executeType[3],executeType[4]);
- if(executeType!=null && executeType.length ==5){
- String[] consumedMsgInfo = getMsg(props, log,executeType);
- if(consumedMsgInfo!=null && consumedMsgInfo.length == 4){
- result = updateMsgOffset(jobId,props,log,consumedMsgInfo,lastMsgId);
- }
- }else{
- log.error("executeType error {} " + Arrays.toString(executeType));
- return result;
- }
- }catch (Exception e){
- log.error("EventChecker failed to receive the message {}" + e);
- return result;
+ public String[] getMsg(int jobId,Properties props, Logger log,String ... params){
+ boolean useRunDate=Boolean.parseBoolean(params[3]);
+ String sqlForReadTMsg;
+ if(useRunDate){
+            sqlForReadTMsg ="SELECT * FROM event_queue WHERE topic=? AND msg_name=? AND send_time >=? AND send_time <=? AND msg_id >? AND run_date =? ORDER BY msg_id ASC LIMIT 1";
+ } else{
+ sqlForReadTMsg="SELECT * FROM event_queue WHERE topic=? AND msg_name=? AND send_time >=? AND send_time <=? AND msg_id >? ORDER BY msg_id ASC LIMIT 1";
}
- return result;
- }
- private String[] createExecuteType(int jobId, Properties props, Logger log,String lastMsgId){
- boolean receiveTodayFlag = (null != receiveToday && "true".equals(receiveToday.trim().toLowerCase()));
- boolean afterSendFlag = (null != afterSend && "true".equals(afterSend.trim().toLowerCase()));
- //只有receiveTodayFlag为true时,useRunDateFlag才有意义。
- Boolean useRunDateFlag = receiveTodayFlag && (null == useRunDate || "true".equalsIgnoreCase(useRunDate.trim()));
- String[] executeType = null;
+ PreparedStatement pstmt = null;
+ Connection msgConn = null;
+ ResultSet rs = null;
+ String[] consumedMsgInfo = null;
try {
- if (receiveTodayFlag && !useRunDateFlag) {
- if (afterSendFlag) {
- executeType = new String[]{nowStartTime, todayEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
- } else {
- executeType = new String[]{todayStartTime, todayEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
- }
- } else {
- if (afterSendFlag) {
- executeType = new String[]{nowStartTime, allEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
- } else {
- executeType = new String[]{allStartTime, allEndTime, lastMsgId, useRunDateFlag.toString(), runDate};
- }
+ msgConn = getEventCheckerConnection(props,log);
+ pstmt = msgConn.prepareCall(sqlForReadTMsg);
+ pstmt.setString(1, topic);
+ pstmt.setString(2, msgName);
+ pstmt.setString(3, params[0]);
+ pstmt.setString(4, params[1]);
+ pstmt.setString(5, params[2]);
+ if(useRunDate){
+ log.info("use run_date, run_date:{}", params[4]);
+ pstmt.setString(6,params[4]);
}
- }catch(Exception e){
- log.error("create executeType failed {}" + e);
- }
- return executeType;
- }
-
- private void waitForTime(Logger log,Long waitTime){
- String waitForTime = wait_for_time;
- String formatWaitForTime = DateFormatUtils.format(new Date(),"yyyy-MM-dd " + waitForTime + ":00");
- DateFormat fmt =new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- Date targetWaitTime = new Date();
- try {
- targetWaitTime = fmt.parse(formatWaitForTime);
- } catch (ParseException e) {
- log.error("parse date failed {}" + e);
- }
+ log.info("param {} StartTime: " + params[0] + ", EndTime: " + params[1]
+ + ", Topic: " + topic + ", MessageName: " + msgName + ", LastMessageID: " + params[2]);
+ rs = pstmt.executeQuery();
- log.info("It will success at a specified time: " + targetWaitTime);
- long wt = targetWaitTime.getTime() - System.currentTimeMillis();
- if(wt > 0){
- //wt must less than wait.time
- if(wt <= waitTime){
- log.info("EventChecker will wait "+ wt + " milliseconds before starting execution");
- try {
- Thread.sleep(wt);
- } catch (InterruptedException e) {
- throw new RuntimeException("EventChecker throws an exception during the waiting time {}"+e);
+ while (rs.next()) {
+ consumedMsgInfo = new String[4];
+ String[] msgKey = new String[]{"msg_id", "msg_name", "sender", "msg"};
+ for (int i = 0; i < msgKey.length; i++) {
+ try {
+ consumedMsgInfo[i] = rs.getString(msgKey[i]);
+ } catch (SQLException e) {
+ throw new RuntimeException("Error while reading data from ResultSet", e);
+ }
}
- }else{
- throw new RuntimeException("The waiting time from Job starttime to wait.for.time"+ wt +"(ms) greater than wait.time , unreasonable setting!");
+ consumedMsgInfo[2] = receiver;
}
- }else{
- log.info("EventChecker has reached the specified time");
+ } catch (SQLException e) {
+ log.error("EventChecker failed to receive message",e);
+ throw new RuntimeException(e);
+ } finally {
+ closeQueryStmt(pstmt, log);
+ closeConnection(msgConn, log);
+ closeQueryRef(rs, log);
}
+ return consumedMsgInfo;
}
}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/EventCheckerService.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/EventCheckerService.java
index 4583b6ea89..cd6bb36818 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/EventCheckerService.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/EventCheckerService.java
@@ -16,6 +16,7 @@
package com.webank.wedatasphere.dss.appconn.eventchecker.service;
+import com.webank.wedatasphere.dss.appconn.eventchecker.entity.EventChecker;
import org.slf4j.Logger;
import java.util.Properties;
@@ -36,7 +37,17 @@ public static EventCheckerService getInstance() {
public boolean sendMsg(int jobId, Properties props, Logger log) {
if (props != null) {
- return new EventCheckSender(props).sendMsg(jobId, props, log);
+ String channelType = props.containsKey(EventChecker.CHANNEL_TYPE) ? props.getProperty(EventChecker.CHANNEL_TYPE)
+ : "DSS";
+ AbstractEventCheck eventCheck;
+ if("DSS".equalsIgnoreCase(channelType)) {
+ eventCheck = new EventCheckSender(props);
+ log.info("this is dss send task");
+ }else {
+ eventCheck = new HttpEventCheckSender(props);
+                log.info("this is kgas send task");
+ }
+ return eventCheck.sendMsg(jobId, props, log);
} else {
log.error("create EventCheckSender failed {}");
return false;
@@ -52,7 +63,17 @@ public boolean sendMsg(int jobId, Properties props, Logger log) {
*/
public boolean reciveMsg(int jobId, Properties props, Logger log) {
if (props != null) {
- return new DefaultEventcheckReceiver(props).reciveMsg(jobId, props, log);
+ String channelType = props.containsKey(EventChecker.CHANNEL_TYPE) ? props.getProperty(EventChecker.CHANNEL_TYPE)
+ : "DSS";
+ AbstractEventCheck eventCheck;
+ if("DSS".equalsIgnoreCase(channelType)){
+ eventCheck= new DefaultEventcheckReceiver(props);
+ log.info("this is dss receive task");
+ }else{
+ eventCheck = new HttpEventcheckerReceiver(props);
+ log.info("this is kgas receive task");
+ }
+ return eventCheck.reciveMsg(jobId, props, log);
} else {
log.error("create EventCheckSender failed {}");
return false;
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/HttpEventCheckSender.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/HttpEventCheckSender.java
new file mode 100644
index 0000000000..f13f6bbe6c
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/HttpEventCheckSender.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.appconn.eventchecker.service;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonSyntaxException;
+import com.webank.wedatasphere.dss.appconn.eventchecker.entity.HttpMsgSendRequest;
+import com.webank.wedatasphere.dss.appconn.eventchecker.entity.HttpMsgSendResponse;
+import com.webank.wedatasphere.dss.appconn.eventchecker.utils.EventCheckerHttpUtils;
+import okhttp3.Response;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+public class HttpEventCheckSender extends AbstractEventCheck {
+ private static final String HTTP_EVENT_SIGN_KEY = "msg.eventchecker.http.sign.key";
+ private static final String HTTP_EVENT_KGAS_SEND_URL = "msg.eventchecker.http.kgas.send.url";
+
+ public HttpEventCheckSender(Properties props) {
+ initECParams(props);
+ }
+
+ @Override
+ public boolean sendMsg(int jobId, Properties props, Logger log) {
+ boolean result = false;
+ String url = props.getProperty(HTTP_EVENT_KGAS_SEND_URL);
+ String key = props.getProperty(HTTP_EVENT_SIGN_KEY);
+ String timestamp = String.valueOf(System.currentTimeMillis());
+ String sign = EventCheckerHttpUtils.calculateSign(key, timestamp);
+ Map header = new HashMap<>();
+ header.put("sign", sign);
+ header.put("timestamp", timestamp);
+ Gson gson = new GsonBuilder().create();
+ Map msgBody = new HashMap<>();
+ long currentTimeMillis = System.currentTimeMillis();
+        long secondsSinceEpoch = currentTimeMillis / 1000;
+ String msgId = String.valueOf(secondsSinceEpoch);
+ if (StringUtils.isNoneBlank(msg)) {
+ try {
+ msgBody = gson.fromJson(msg, Map.class);
+ }catch (JsonSyntaxException jsonSyntaxException){
+ throw new RuntimeException("msg.body格式有误,请输入标准的json格式"+ jsonSyntaxException.getMessage());
+ }
+ }
+ HttpMsgSendRequest message = new HttpMsgSendRequest(sender, topic, msgName, runDate, msgId, msgBody);
+ String responseBody = null;
+ String messageJson = gson.toJson(message);
+ String requestStr = EventCheckerHttpUtils.requestToString(url, "POST", header, null, messageJson);
+ log.info("try to send http message,request{}",requestStr);
+ try (Response response = EventCheckerHttpUtils.post(url, header, null, messageJson)) {
+ HttpMsgSendResponse msgSendResponse;
+ try {
+ responseBody = response.body().string();
+ msgSendResponse = gson.fromJson(responseBody,
+ HttpMsgSendResponse.class);
+ } catch (Exception e){
+ throw new RuntimeException("请求KGAS失败,详情:" + responseBody);
+ }
+
+ int reCode = msgSendResponse.getRetCode();
+ if (reCode == 0) {
+ result = true;
+ log.info("send successfully.jobId:{}",jobId);
+ } else {
+ String errorMsg ="信号发送失败,详情:"+ responseBody;
+ log.error("send failed,response:{}", errorMsg);
+ throw new RuntimeException(errorMsg);
+ }
+
+ } catch (IOException e) {
+ String errorMsg = responseBody != null ? responseBody : "";
+
+ throw new RuntimeException(errorMsg,e);
+ }
+
+ return result;
+ }
+}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/HttpEventcheckerReceiver.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/HttpEventcheckerReceiver.java
new file mode 100644
index 0000000000..3fc71645b8
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/HttpEventcheckerReceiver.java
@@ -0,0 +1,105 @@
+package com.webank.wedatasphere.dss.appconn.eventchecker.service;
+
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.webank.wedatasphere.dss.appconn.eventchecker.entity.HttpMsgReceiveRequest;
+import com.webank.wedatasphere.dss.appconn.eventchecker.entity.HttpMsgReceiveResponse;
+import com.webank.wedatasphere.dss.appconn.eventchecker.utils.EventCheckerHttpUtils;
+import okhttp3.Response;
+import org.slf4j.Logger;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Author: xlinliu
+ * Date: 2024/8/1
+ */
+public class HttpEventcheckerReceiver extends AbstractEventCheckReceiver{
+ private static final String HTTP_EVENT_SIGN_KEY = "msg.eventchecker.http.sign.key";
+ private static final String HTTP_EVENT_KGAS_RECEIVE_URL = "msg.eventchecker.http.kgas.receive.url";
+
+
+
+ public HttpEventcheckerReceiver(Properties props) {
+ super(props);
+ }
+
+ @Override
+ public String[] getMsg(int jobId,Properties props, Logger log, String... params) {
+ String url=props.getProperty(HTTP_EVENT_KGAS_RECEIVE_URL);
+ String key=props.getProperty(HTTP_EVENT_SIGN_KEY);
+ String timestamp=String.valueOf( System.currentTimeMillis());
+ String sign = EventCheckerHttpUtils.calculateSign(key,timestamp);
+ Map header = new HashMap<>();
+ header.put("sign", sign);
+ header.put("timestamp", timestamp);
+ //params = new String[]{nowStartTime, todayEndTime, lastMsgId, useRunDateFlag.toString(), runDate}
+ boolean useRunDate=Boolean.parseBoolean(params[3]);
+ Long lastMsgId = Long.valueOf(params[2]);
+ String runDate=params[4];
+ Gson gson = new GsonBuilder().create();
+ boolean receiveTodayFlag = (null != receiveToday && "true".equalsIgnoreCase(receiveToday.trim()));
+ HttpMsgReceiveRequest message = new HttpMsgReceiveRequest(receiver, topic, msgName, runDate, receiveTodayFlag, useRunDate);
+ String[] consumedMsgInfo = null;
+ String responseBody = null;
+ String messageJson = gson.toJson(message);
+ try (Response response = EventCheckerHttpUtils.post(url, header, null, messageJson)) {
+ HttpMsgReceiveResponse msgReceiveResponse;
+ try {
+ responseBody = response.body().string();
+ msgReceiveResponse = gson.fromJson(responseBody,
+ HttpMsgReceiveResponse.class);
+ }catch (Exception e){
+ throw new RuntimeException("请求KGAS失败,详情:" + responseBody);
+ }
+ int reCode = msgReceiveResponse.getRetCode();
+ if (reCode == 0 ) {
+ log.info("receive request successfully.jobId:{}",jobId);
+ if("SUCCESS".equalsIgnoreCase(msgReceiveResponse.getStatus())) {
+ log.info("receive successfully,now try to parse message.jobId:{}",jobId);
+ String msgBodyJson = gson.toJson(msgReceiveResponse.getMsgBody());
+ Long msgId = msgReceiveResponse.getMsgId();
+ if (msgId > lastMsgId) {
+ log.info("receive new message successfully.jobId:{}", jobId);
+ //{"msg_id", "msg_name", "receiver", "msg"};
+ consumedMsgInfo = new String[]{msgId.toString(), msgName, receiver, msgBodyJson};
+ }
+ }else if ("FAILED".equalsIgnoreCase(msgReceiveResponse.getStatus())){
+ log.error("receive failed,response:{}", responseBody);
+ String errorMsg = "信号接收失败。详情:"+responseBody;
+ throw new RuntimeException(errorMsg);
+ }else{
+ log.info("receive failed,will try again later ,retCode:{},status:{},message:{}",
+ msgReceiveResponse.getRetCode()
+ ,msgReceiveResponse.getStatus()
+ ,msgReceiveResponse.getMessage()
+ );
+ consumedMsgInfo = null;
+ }
+ } else if (reCode == 9998) {
+ log.info("message has not send to third system,will try again later ,retCode:{},status:{},message:{}",
+ msgReceiveResponse.getRetCode()
+ ,msgReceiveResponse.getStatus()
+ ,msgReceiveResponse.getMessage()
+ );
+ consumedMsgInfo = null;
+ } else {
+ String requestStr = EventCheckerHttpUtils.requestToString(url, "POST", header, null, messageJson);
+ log.error("receive failed,request:{}", requestStr);
+ log.error("receive failed,response:{}", responseBody);
+ String errorMsg = "信号接收失败。详情:"+responseBody;
+ throw new RuntimeException(errorMsg);
+ }
+ return consumedMsgInfo;
+ } catch (IOException e) {
+ String errorMsg = responseBody != null ? responseBody: "";
+ throw new RuntimeException(errorMsg,e);
+ }
+ }
+
+
+}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/utils/EventCheckerHttpUtils.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/utils/EventCheckerHttpUtils.java
new file mode 100644
index 0000000000..4aaaae70a2
--- /dev/null
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/utils/EventCheckerHttpUtils.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.appconn.eventchecker.utils;
+
+
+import cn.hutool.crypto.digest.DigestUtil;
+import okhttp3.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+public class EventCheckerHttpUtils {
+
+ private static final Logger logger = LoggerFactory.getLogger(EventCheckerHttpUtils.class);
+
+ /**
+ * 发送get请求
+ *
+ * @param url 地址
+ * @param params 参数
+ * @return 请求结果
+ */
+ public static Response get(String url, Map headerMap, Map params) throws IOException {
+ return request("GET", url, headerMap, params,null);
+ }
+
+ /**
+ * 发送post请求
+ *
+ * @param url 地址
+ * @param params 参数
+ * @return 请求结果
+ */
+ public static Response post(String url, Map headerMap, Map params, String jsonBody) throws IOException {
+
+ return request("POST",url, headerMap, params,jsonBody);
+ }
+
+ /**
+ * 发送http请求
+ *
+ * @param method 请求方法
+ * @param url 地址
+ * @param headerMap 可以为null
+ * @param params 参数map,可以为null
+ * @param jsonBody json body,可以为null
+ * @return 请求结果
+ */
+ public static Response request(String method, String url, Map headerMap,
+ Map params, String jsonBody) throws IOException {
+
+ if (method == null) {
+ throw new RuntimeException("请求方法不能为空");
+ }
+
+ if (url == null) {
+ throw new RuntimeException("url不能为空");
+ }
+
+ HttpUrl.Builder httpBuilder = HttpUrl.parse(url).newBuilder();
+
+ if (params != null) {
+ for (Map.Entry param : params.entrySet()) {
+ httpBuilder.addQueryParameter(param.getKey(), param.getValue());
+ }
+ }
+ Headers headers = setHeaderParams(headerMap);
+ RequestBody body = jsonBody == null ? null : RequestBody.Companion.create(jsonBody,
+ MediaType.Companion.parse("application/json"));
+ Request request = new Request.Builder()
+ .url(httpBuilder.build())
+ .method(method, body)
+ .headers(headers)
+ .build();
+
+
+ OkHttpClient client = new OkHttpClient.Builder()
+ .connectTimeout(10, TimeUnit.SECONDS)
+ .writeTimeout(20, TimeUnit.SECONDS)
+ .readTimeout(20, TimeUnit.SECONDS)
+ .build();
+ Response response = client.newCall(request).execute();
+ logger.info("eventChecker http request successfully,url:{},retCode:{}", url, response.code());
+ return response;
+ }
+
+
+
+ /**
+ * 添加headers
+ *
+ * @param headerParams
+ * @return
+ */
+ private static Headers setHeaderParams(Map headerParams) {
+ Headers.Builder headersbuilder = new Headers.Builder();
+ if (headerParams != null) {
+ headerParams.forEach(headersbuilder::add);
+ }
+ return headersbuilder.build();
+
+ }
+
+ // 鉴权方法
+ public static String calculateSign( String key,String timestamp) {
+
+ // 计算签名
+ return DigestUtil.sha256Hex(key + timestamp);
+ }
+ public static String requestToString(String url, String method, Map headerMap,
+ Map params,
+ String jsonBody){
+ HttpUrl.Builder httpBuilder = HttpUrl.parse(url).newBuilder();
+
+ if (params != null) {
+ for (Map.Entry param : params.entrySet()) {
+ httpBuilder.addQueryParameter(param.getKey(), param.getValue());
+ }
+ }
+ Headers headers = setHeaderParams(headerMap);
+ RequestBody body = jsonBody == null ? null : RequestBody.Companion.create(jsonBody,
+ MediaType.Companion.parse("application/json"));
+ Request request = new Request.Builder()
+ .url(httpBuilder.build())
+ .method(method, body)
+ .headers(headers)
+ .build();
+
+
+ StringBuilder sb = new StringBuilder();
+ sb.append("Request: ").append(request.method()).append(" ").append(request.url()).append("\n");
+ sb.append("Headers: ").append(request.headers()).append("\n");
+ sb.append("Body: ").append(jsonBody);
+ return sb.toString();
+ }
+
+ public static void main(String[] args) throws IOException {
+ Response response = get("http://10.107.116.246:8088/api/rest_j/v1/dss/scriptis/proxy/addUserProxy", null, null);
+ System.out.println(response.body().string());
+ }
+
+}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/pom.xml b/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
index ae044adb08..0cd8349a5d 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java
index 821c3a6d0a..c08b0ada54 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java
@@ -29,5 +29,8 @@ public class AzkabanConstant {
public final static String FLOW_CONTEXT_ID = "wds.linkis.flow.contextID=";
public final static String LINKIS_VERSION = "linkis.version";
public final static String JOB_COMMENT = "comment";
+ public final static String AUTO_DISABLED = "auto.disabled";
+
+ public final static String WTSS_PREFIX = "wtss.";
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/AzkabanWorkflowToRelSynchronizer.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/AzkabanWorkflowToRelSynchronizer.java
index 31cbb70e0e..a4714db747 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/AzkabanWorkflowToRelSynchronizer.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/AzkabanWorkflowToRelSynchronizer.java
@@ -56,7 +56,7 @@ public class AzkabanWorkflowToRelSynchronizer implements WorkflowToRelSynchroniz
private DSSToRelConversionOperation dssToRelConversionOperation;
//匹配wtss返回的错误信息
private static final Pattern ERROR_PATTERN = Pattern.compile("(?<=Error uploading project properties)[\\s\\S]+.job");
- private static final int SCHEDULIS_MAX_SIZE = 250;
+ private static final int SCHEDULIS_MAX_SIZE = 1024;
public void init() {
String baseUrl = dssToRelConversionOperation.getConversionService().getAppInstance().getBaseUrl();
@@ -139,7 +139,7 @@ private String dealSchedulisErrorMsg(String errorMsg) {
Matcher matcher = ERROR_PATTERN.matcher(errorMsg);
if (matcher.find() && matcher.group().length() >= SCHEDULIS_MAX_SIZE) {
errorMsg = "wokflow name " + matcher.group().split("/")[1] + " is to long, please abide the rules of schedulis: projectName + workflowName*3 + 12 <= 250 " +
- "(工作流名称太长,需要满足规则 项目名长度 + 工作流长度*3 + 12 <= 250)"; }
+ "(工作流名称太长,需要满足规则 项目名长度 + 工作流长度*3 + 12 <= 1024)"; }
return errorMsg;
}
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanNodeRelConverter.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanNodeRelConverter.java
index 8e24ac5801..6d7d739ced 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanNodeRelConverter.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanNodeRelConverter.java
@@ -20,6 +20,7 @@
import com.webank.wedatasphere.dss.appconn.schedulis.constant.AzkabanConstant;
import com.webank.wedatasphere.dss.appconn.schedulis.entity.AzkabanWorkflow;
import com.webank.wedatasphere.dss.appconn.schedulis.linkisjob.LinkisJobConverter;
+import com.webank.wedatasphere.dss.appconn.schedulis.linkisjob.WTSSJobConverter;
import com.webank.wedatasphere.dss.common.entity.Resource;
import com.webank.wedatasphere.dss.common.exception.DSSErrorException;
import com.webank.wedatasphere.dss.common.utils.ClassUtils;
@@ -78,6 +79,11 @@ private void writeNodeToJobLocal(WorkflowNode workflowNode, String storePath) th
FileUtils.forceMkdir(jobDirFile);
File jobFile = new File(storePath,workflowNode.getName() + AzkabanConstant.AZKABAN_JOB_SUFFIX);
jobFile.createNewFile();
+ if(workflowNode.getNodeType()!=null && workflowNode.getNodeType().startsWith(AzkabanConstant.WTSS_PREFIX)){
+ nodeConverter = ClassUtils.getInstanceOrDefault(NodeConverter.class, new WTSSJobConverter());
+ }else{
+ nodeConverter = ClassUtils.getInstanceOrDefault(NodeConverter.class, new LinkisJobConverter());
+ }
String nodeString = nodeConverter.conversion(workflowNode);
os = FileUtils.openOutputStream(jobFile,true);
os.write(nodeString.getBytes());
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanRelConverter.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanRelConverter.java
index a413d9d933..d949ec7e92 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanRelConverter.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/conversion/WorkflowToAzkbanRelConverter.java
@@ -105,9 +105,6 @@ private String getFlowResourceStringPrefix(String projectStorePath, String store
private void writeFlowPropertiesToLocal(AzkabanWorkflow flow) throws DSSErrorException {
List<Map<String, Object>> flowProperties = flow.getFlowProperties();
- if (flowProperties == null || flowProperties.isEmpty()) {
- return;
- }
FileOutputStream os = null;
try {
String storePath = flow.getStorePath();
@@ -115,9 +112,11 @@ private void writeFlowPropertiesToLocal(AzkabanWorkflow flow) throws DSSErrorExc
flowPrpsFile.createNewFile();
os = FileUtils.openOutputStream(flowPrpsFile, true);
StringBuilder stringBuilder = new StringBuilder();
- flowProperties.forEach(p -> p.forEach((k, v) -> {
- stringBuilder.append(AzkabanConstant.LINKIS_FLOW_VARIABLE_KEY + k + "=" + v + "\n");
- }));
+ if(flowProperties != null) {
+ flowProperties.forEach(p -> p.forEach((k, v) -> {
+ stringBuilder.append(AzkabanConstant.LINKIS_FLOW_VARIABLE_KEY + k + "=" + v + "\n");
+ }));
+ }
// update by peaceWong add contextID to Flow properties
String contextID = flow.getContextID();
if (StringUtils.isNotBlank(contextID)) {
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java
index 76e7ef8858..2d269872f1 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java
@@ -23,6 +23,8 @@ public class LinkisJob {
private String type;
private String linkistype;
private String proxyUser;
+
+ private String autoDisabled;
private String dependencies;
private Map<String, String> conf;
private String command;
@@ -60,6 +62,14 @@ public void setProxyUser(String proxyUser) {
this.proxyUser = proxyUser;
}
+ public String getAutoDisabled() {
+ return autoDisabled;
+ }
+
+ public void setAutoDisabled(String autoDisabled) {
+ this.autoDisabled = autoDisabled;
+ }
+
public String getDependencies() {
return dependencies;
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java
index f175b6ca3c..8f569be455 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java
@@ -70,6 +70,7 @@ private String convertJobToString(LinkisJob job){
map.put(WorkflowConstant.PROXY_USER,job.getProxyUser());
map.put(AzkabanConstant.JOB_COMMAND,job.getCommand());
map.put(AzkabanConstant.JOB_COMMENT,job.getComment());
+ map.put(AzkabanConstant.AUTO_DISABLED,job.getAutoDisabled());
Map<String, String> labels = new HashMap<>(1);
labels.put("route", SchedulerConf.JOB_LABEL.getValue());
map.put(AzkabanConstant.JOB_LABELS, DSSCommonUtils.COMMON_GSON.toJson(labels));
@@ -114,7 +115,22 @@ private void convertConfiguration(WorkflowNode workflowNode, LinkisJob job){
configuration.forEach((k,v)-> {
if(null!=v) {
v.forEach((k2, v2) -> {
- if(null !=v2) {job.getConf().put(confprefix + k + "." + k2, v2.toString());}
+ if(v2!=null) {
+ String vStr;
+ if (v2 instanceof Number) {
+ Number numValue = (Number) v2;
+ vStr = numValue.longValue() == numValue.doubleValue() ?
+ String.valueOf(numValue.longValue()) :
+ numValue.toString();
+ } else {
+ vStr = v2.toString();
+ }
+ if (AzkabanConstant.AUTO_DISABLED.equals(k2) ) {
+ job.setAutoDisabled(vStr);
+ } else {
+ job.getConf().put(confprefix + k + "." + k2, vStr);
+ }
+ }
});
}
});
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/WTSSJobConverter.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/WTSSJobConverter.java
new file mode 100644
index 0000000000..0705957026
--- /dev/null
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/WTSSJobConverter.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.appconn.schedulis.linkisjob;
+
+import com.webank.wedatasphere.dss.appconn.scheduler.utils.SchedulerConf;
+import com.webank.wedatasphere.dss.appconn.schedulis.conf.AzkabanConf;
+import com.webank.wedatasphere.dss.appconn.schedulis.constant.AzkabanConstant;
+import com.webank.wedatasphere.dss.appconn.schedulis.conversion.NodeConverter;
+import com.webank.wedatasphere.dss.common.utils.DSSCommonUtils;
+import com.webank.wedatasphere.dss.workflow.core.constant.WorkflowConstant;
+import com.webank.wedatasphere.dss.workflow.core.entity.WorkflowNode;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class WTSSJobConverter implements NodeConverter {
+
+ private LinkisJobTuning[] linkisJobTunings;
+
+ public WTSSJobConverter(){
+ this.linkisJobTunings = new LinkisJobTuning[]{new AzkabanSubFlowJobTuning()};
+ }
+
+ @Override
+ public String conversion(WorkflowNode workflowNode){
+ return baseConversion(workflowNode);
+ }
+
+ private String baseConversion(WorkflowNode workflowNode){
+ LinkisJob job = new LinkisJob();
+ job.setConf(new HashMap<>());
+ job.setName(workflowNode.getName());
+ job.setComment(workflowNode.getDSSNode().getDesc());
+ convertHead(workflowNode,job);
+ convertDependencies(workflowNode,job);
+ convertProxyUser(workflowNode,job);
+ convertConfiguration(workflowNode,job);
+ convertJobCommand(workflowNode,job);
+ Arrays.stream(linkisJobTunings).forEach(t ->{
+ if(t.ifJobCantuning(workflowNode.getNodeType())) {
+ t.tuningJob(job);
+ }
+ });
+ return convertJobToString(job);
+ }
+
+ private String convertJobToString(LinkisJob job){
+ HashMap<String, String> map = new HashMap<>(8);
+ map.put(AzkabanConstant.LINKIS_VERSION, AzkabanConf.LINKIS_VERSION.getValue());
+ map.put(AzkabanConstant.JOB_TYPE,job.getType());
+ map.put(AzkabanConstant.LINKIS_TYPE,job.getLinkistype());
+ map.put(AzkabanConstant.ZAKABAN_DEPENDENCIES_KEY,job.getDependencies());
+ map.put(WorkflowConstant.PROXY_USER,job.getProxyUser());
+ map.put(AzkabanConstant.JOB_COMMAND,job.getCommand());
+ map.put(AzkabanConstant.JOB_COMMENT,job.getComment());
+ map.put(AzkabanConstant.AUTO_DISABLED,job.getAutoDisabled());
+ Map<String, String> labels = new HashMap<>(1);
+ labels.put("route", SchedulerConf.JOB_LABEL.getValue());
+ map.put(AzkabanConstant.JOB_LABELS, DSSCommonUtils.COMMON_GSON.toJson(labels));
+ map.putAll(job.getConf());
+ StringBuilder stringBuilder = new StringBuilder();
+ map.forEach((k,v)->{
+ if(v != null) {
+ //for value contains "\n"
+ v = v.replace("\n", ";");
+ stringBuilder.append(k).append("=").append(v).append("\n");
+ }
+ });
+ return stringBuilder.toString();
+ }
+
+ private void convertHead(WorkflowNode workflowNode, LinkisJob job){
+ job.setType("eventchecker");
+ job.setLinkistype(workflowNode.getNodeType());
+ }
+
+ private void convertDependencies(WorkflowNode workflowNode, LinkisJob job){
+ List<String> dependencys = workflowNode.getDSSNode().getDependencys();
+ if(dependencys != null && !dependencys.isEmpty()) {
+ StringBuilder dependencies = new StringBuilder();
+ dependencys.forEach(d -> dependencies.append(d).append(","));
+ job.setDependencies(dependencies.substring(0,dependencies.length()-1));
+ }
+ }
+
+ private void convertProxyUser(WorkflowNode workflowNode, LinkisJob job){
+ String userProxy = workflowNode.getDSSNode().getUserProxy();
+ if(!StringUtils.isEmpty(userProxy)) {
+ job.setProxyUser(userProxy);
+ }
+ }
+
+ private void convertConfiguration(WorkflowNode workflowNode, LinkisJob job){
+ Map<String, Object> params = workflowNode.getDSSNode().getParams();
+ if (params != null && !params.isEmpty()) {
+ Map<String, Map<String, Object>> configuration = (Map<String, Map<String, Object>>) params.get("configuration");
+ configuration.forEach((k,v)-> {
+ if(null!=v) {
+ v.forEach((k2, v2) -> {
+ if(AzkabanConstant.AUTO_DISABLED.equals(k2) && null !=v2){job.setAutoDisabled(v2.toString());}
+ else if(null !=v2) {
+ String vStr;
+ if (v2 instanceof Number) {
+ Number numValue = (Number) v2;
+ vStr = numValue.longValue() == numValue.doubleValue() ?
+ String.valueOf(numValue.longValue()) :
+ numValue.toString();
+ } else {
+ vStr = v2.toString();
+ }
+ if("only.receive.today".equals(k2)){
+ job.getConf().put("msg.rece.today", vStr);
+ }else if("max.receive.hours".equals(k2)){
+ job.getConf().put("wait.time", vStr);
+ }else{
+ job.getConf().put(k2, vStr);
+ }
+ }
+ });
+ }
+ });
+ }
+ }
+
+ private void convertJobCommand(WorkflowNode workflowNode, LinkisJob job){
+ Map<String, Object> jobContent = workflowNode.getDSSNode().getJobContent();
+ if(jobContent != null) {
+ jobContent.remove("jobParams");
+ job.setCommand(DSSCommonUtils.COMMON_GSON.toJson(jobContent));
+ }
+ }
+}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java
index b15e20d308..7514853d6d 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java
@@ -28,6 +28,7 @@ public ProjectResponseRef searchProject(RefProjectContentRequestRef.RefProjectCo
params.put("project", requestRef.getProjectName());
params.put("ajax", "fetchprojectflows");
try {
+ logger.info("request url from Schedulis is: {}.", queryUrl);
String responseBody = SchedulisHttpUtils.getHttpGetResult(queryUrl, params, ssoRequestOperation, requestRef.getWorkspace());
logger.info("responseBody from Schedulis is: {}.", responseBody);
Map map = DSSCommonUtils.COMMON_GSON.fromJson(responseBody, new TypeToken>(){}.getType());
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectUpdateOperation.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectUpdateOperation.java
index 5b4d9a4417..b52919478a 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectUpdateOperation.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectUpdateOperation.java
@@ -43,9 +43,9 @@ public void init() {
@Override
public ResponseRef updateProject(ProjectUpdateRequestRef.ProjectUpdateRequestRefImpl projectRef) {
- if (CollectionUtils.isNotEmpty(projectRef.getDSSProjectPrivilege().getReleaseUsers())) {
- // 先校验运维用户是否存在于 Schedulis,如果不存在,则不能成功创建工程。
- projectRef.getDSSProjectPrivilege().getReleaseUsers().forEach(releaseUser -> {
+ if (CollectionUtils.isNotEmpty(projectRef.getAddedDSSProjectPrivilege().getReleaseUsers())) {
+ // 先校验新增的运维用户是否存在于 Schedulis,如果不存在,则不能成功创建工程。
+ projectRef.getAddedDSSProjectPrivilege().getReleaseUsers().forEach(releaseUser -> {
if (!AzkabanUserService.containsUser(releaseUser, getBaseUrl(), ssoRequestOperation, projectRef.getWorkspace())) {
throw new ExternalOperationFailedException(100323, "当前设置的发布用户: " + releaseUser + ", 在 Schedulis 系统中不存在,请在Schedulis中创建该用户!");
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/service/AzkabanUserService.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/service/AzkabanUserService.java
index 2a4cccbda0..a5ea2ceeb5 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/service/AzkabanUserService.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/service/AzkabanUserService.java
@@ -62,6 +62,7 @@ private static List requestUserId(String releaseUser, String
}
} catch (Exception e) {
LOGGER.error("update all releaseUsers from Schedulis url {} failed.", baseUrl, e);
+ throw e;
}
return newEntityList;
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/utils/AzkabanUtils.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/utils/AzkabanUtils.java
index 6d0588d48a..9fa22d3def 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/utils/AzkabanUtils.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/utils/AzkabanUtils.java
@@ -20,6 +20,7 @@
import org.apache.commons.lang.StringUtils;
import java.util.Map;
+import java.util.Objects;
public class AzkabanUtils {
@@ -27,7 +28,7 @@ public static String handleAzkabanEntity(String entityString) {
if(StringUtils.isNotBlank(entityString)){
if(entityString.startsWith("{") && entityString.endsWith("}")){
Map resMap = DSSCommonUtils.COMMON_GSON.fromJson(entityString, Map.class);
- if(resMap.containsKey("error")){
+ if(resMap.containsKey("error") && !Objects.isNull(resMap.get("error"))){
return (String)resMap.get("error");
}
}
diff --git a/dss-appconn/appconns/dss-scriptis-appconn/pom.xml b/dss-appconn/appconns/dss-scriptis-appconn/pom.xml
index 1fbb75e667..d41686c53e 100644
--- a/dss-appconn/appconns/dss-scriptis-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-scriptis-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-sendemail-appconn/pom.xml b/dss-appconn/appconns/dss-sendemail-appconn/pom.xml
index 792ccf982b..29dcd96a8b 100644
--- a/dss-appconn/appconns/dss-sendemail-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-sendemail-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml b/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml
index 050c80d844..19a2c7b403 100644
--- a/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml
+++ b/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/src/main/scala/com/webank/wedatasphere/dss/appconn/sendemail/emailcontent/parser/AbstractEmailContentParser.scala b/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/src/main/scala/com/webank/wedatasphere/dss/appconn/sendemail/emailcontent/parser/AbstractEmailContentParser.scala
index f58d908f28..54d8cf5ec1 100644
--- a/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/src/main/scala/com/webank/wedatasphere/dss/appconn/sendemail/emailcontent/parser/AbstractEmailContentParser.scala
+++ b/dss-appconn/appconns/dss-sendemail-appconn/sendemail-appconn-core/src/main/scala/com/webank/wedatasphere/dss/appconn/sendemail/emailcontent/parser/AbstractEmailContentParser.scala
@@ -27,7 +27,7 @@ import org.apache.linkis.common.io.resultset.ResultSetReader
import org.apache.linkis.common.io.{MetaData, Record}
import org.apache.linkis.common.utils.Utils
import org.apache.linkis.storage.LineRecord
-import org.apache.linkis.storage.resultset.ResultSetReaderFactory
+import org.apache.linkis.storage.resultset.ResultSetReader
import org.apache.commons.io.IOUtils
abstract class AbstractEmailContentParser[T] extends EmailContentParser {
@@ -42,8 +42,8 @@ abstract class AbstractEmailContentParser[T] extends EmailContentParser {
case _ =>
}
- protected def getResultSetReader(fsPathStore: FsPathStoreEmailContent): ResultSetReader[_, _ ] = {
- val reader = ResultSetReaderFactory.getResultSetReader(fsPathStore.getFsPath.getSchemaPath)
+ protected def getResultSetReader(fsPathStore: FsPathStoreEmailContent): ResultSetReader[_ <: MetaData, _ <: Record] = {
+ val reader = ResultSetReader.getResultSetReader(fsPathStore.getFsPath.getSchemaPath)
reader.getMetaData
reader
}
diff --git a/dss-appconn/appconns/dss-sso-appconn/pom.xml b/dss-appconn/appconns/dss-sso-appconn/pom.xml
index 2b4eb6f8a6..4fcc9d371a 100644
--- a/dss-appconn/appconns/dss-sso-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-sso-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-workflow-appconn/pom.xml b/dss-appconn/appconns/dss-workflow-appconn/pom.xml
index 8ec03fe62c..a7c476b66a 100644
--- a/dss-appconn/appconns/dss-workflow-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-workflow-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefCopyOperation.java b/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefCopyOperation.java
index de702dd629..ebb4a19914 100644
--- a/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefCopyOperation.java
+++ b/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefCopyOperation.java
@@ -16,6 +16,7 @@
package com.webank.wedatasphere.dss.appconn.workflow.opertion;
+import com.webank.wedatasphere.dss.common.label.DSSLabel;
import com.webank.wedatasphere.dss.common.utils.RpcAskUtils;
import com.webank.wedatasphere.dss.orchestrator.common.ref.OrchestratorRefConstant;
import com.webank.wedatasphere.dss.sender.service.DSSSenderServiceFactory;
@@ -28,6 +29,7 @@
import org.apache.linkis.rpc.Sender;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -54,7 +56,9 @@ public RefJobContentResponseRef copyRef(ThirdlyRequestRef.CopyWitContextRequestR
workflowCopyRequestRef.getWorkspace(), appId, contextIdStr,
projectName, version, description, workflowCopyRequestRef.getDSSLabels(),
targetProjectId, (String) nodeSuffix.orElse(null), (String) newFlowName.orElse(null));
- ResponseCopyWorkflow responseCopyWorkflow = RpcAskUtils.processAskException(sender.ask(requestCopyWorkflow),
+ List<DSSLabel> dssLabels = workflowCopyRequestRef.getDSSLabels();
+ Sender tempSend = DSSSenderServiceFactory.getOrCreateServiceInstance().getWorkflowSender(dssLabels);
+ ResponseCopyWorkflow responseCopyWorkflow = RpcAskUtils.processAskException(tempSend.ask(requestCopyWorkflow),
ResponseCopyWorkflow.class, RequestCopyWorkflow.class);
Map refJobContent = new HashMap<>(2);
refJobContent.put(OrchestratorRefConstant.ORCHESTRATION_ID_KEY, responseCopyWorkflow.getDssFlow().getId());
diff --git a/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefExportOperation.java b/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefExportOperation.java
index 038db59133..febdcdf8ae 100644
--- a/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefExportOperation.java
+++ b/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefExportOperation.java
@@ -48,7 +48,8 @@ public ExportResponseRef exportRef(ThirdlyRequestRef.RefJobContentRequestRefImpl
projectId,
projectName,
toJson(requestRef.getWorkspace()),
- requestRef.getDSSLabels());
+ requestRef.getDSSLabels(),
+ true);
Sender sender = DSSSenderServiceFactory.getOrCreateServiceInstance().getWorkflowSender(requestRef.getDSSLabels());
ResponseExportWorkflow responseExportWorkflow = RpcAskUtils.processAskException(sender.ask(requestExportWorkflow),
ResponseExportWorkflow.class, RequestExportWorkflow.class);
diff --git a/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefImportOperation.java b/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefImportOperation.java
index 1ccab9a8ca..bee804b055 100644
--- a/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefImportOperation.java
+++ b/dss-appconn/appconns/dss-workflow-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/workflow/opertion/WorkflowRefImportOperation.java
@@ -17,7 +17,6 @@
package com.webank.wedatasphere.dss.appconn.workflow.opertion;
import com.webank.wedatasphere.dss.common.protocol.JobStatus;
-import com.webank.wedatasphere.dss.common.utils.MapUtils;
import com.webank.wedatasphere.dss.common.utils.RpcAskUtils;
import com.webank.wedatasphere.dss.orchestrator.common.ref.OrchestratorRefConstant;
import com.webank.wedatasphere.dss.sender.service.DSSSenderServiceFactory;
@@ -48,6 +47,9 @@ public RefJobContentResponseRef importRef(ThirdlyRequestRef.ImportWitContextRequ
requestRef.getNewVersion(),
requestRef.getWorkspace(),
requestRef.getContextId(), requestRef.getDSSLabels());
+ if("true".equals( requestRef.getParameter("isOldPackageStruct"))){
+ requestImportWorkflow.setOldPackageStruct(true);
+ }
Sender sender = DSSSenderServiceFactory.getOrCreateServiceInstance().getWorkflowSender(requestRef.getDSSLabels());
ResponseImportWorkflow responseImportWorkflow = RpcAskUtils.processAskException(sender.ask(requestImportWorkflow),
diff --git a/dss-appconn/dss-appconn-core/pom.xml b/dss-appconn/dss-appconn-core/pom.xml
index 60f42dc4bb..fb23c61acb 100644
--- a/dss-appconn/dss-appconn-core/pom.xml
+++ b/dss-appconn/dss-appconn-core/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
diff --git a/dss-appconn/dss-appconn-loader/pom.xml b/dss-appconn/dss-appconn-loader/pom.xml
index 2155e4f32c..46064bb03e 100644
--- a/dss-appconn/dss-appconn-loader/pom.xml
+++ b/dss-appconn/dss-appconn-loader/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml
index f12b02dd25..4baf611a53 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/conf/AppConnManagerClientConfiguration.java b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/conf/AppConnManagerClientConfiguration.java
index e6fbc5e707..7628fee54d 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/conf/AppConnManagerClientConfiguration.java
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/conf/AppConnManagerClientConfiguration.java
@@ -21,7 +21,8 @@
public class AppConnManagerClientConfiguration {
- public final static CommonVars DSS_APPCONN_CLIENT_TOKEN = CommonVars.apply("wds.dss.appconn.client.user.token","172.0.0.1");
+ public final static CommonVars<String> DSS_APPCONN_CLIENT_TOKEN =
+ CommonVars.apply("wds.dss.appconn.client.user.token","xxx");
public final static CommonVars LINKIS_ADMIN_USER = CommonVars.apply("wds.dss.appconn.client.user","ws");
public final static CommonVars APPCONN_WAIT_MAX_TIME = CommonVars.apply("wds.dss.appconn.client.load.wait.max.time",new TimeType("3m"));
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/service/AppConnResourceServiceImpl.java b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/service/AppConnResourceServiceImpl.java
index 2f16d6fdb8..25a1c0ef3b 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/service/AppConnResourceServiceImpl.java
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-client/src/main/java/com/webank/wedatasphere/dss/appconn/manager/service/AppConnResourceServiceImpl.java
@@ -125,6 +125,8 @@ public String getAppConnHome(AppConnInfo appConnInfo) {
} catch (DSSErrorException e) {
throw new AppConnHomeNotExistsWarnException(20350, "Unzip " + zipFilePath + " failed, AppConn is " + appConnName, e);
}
+ //解压完了,zip包就没用了,删掉。
+ deleteFile(zipFilePath, "Delete the zip file " + zipFilePath.getName() + " of AppConn " + appConnName + " failed");
File oldIndexFile = AppConnIndexFileUtils.getIndexFile(appConnPath);
// delete old index file.
@@ -157,6 +159,7 @@ public String getAppConnHome(AppConnInfo appConnInfo) {
private void deleteFile(File file, String errorMsg) {
try {
+ LOGGER.info("delete appconn file:{}", file.getAbsolutePath());
FileUtils.forceDelete(file);
} catch (IOException e) {
if(StringUtils.isNotEmpty(errorMsg)) {
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml b/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml
index a2693bc65a..e02a3e9919 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/src/main/java/com/webank/wedatasphere/dss/appconn/manager/impl/AbstractAppConnManager.java b/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/src/main/java/com/webank/wedatasphere/dss/appconn/manager/impl/AbstractAppConnManager.java
index a6d019e7ab..c811e515c0 100644
--- a/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/src/main/java/com/webank/wedatasphere/dss/appconn/manager/impl/AbstractAppConnManager.java
+++ b/dss-appconn/dss-appconn-manager/dss-appconn-manager-core/src/main/java/com/webank/wedatasphere/dss/appconn/manager/impl/AbstractAppConnManager.java
@@ -43,6 +43,7 @@
import org.slf4j.LoggerFactory;
import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.Collectors;
@@ -52,7 +53,7 @@ public abstract class AbstractAppConnManager implements AppConnManager {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAppConnManager.class);
private final AppConnLoader appConnLoader = AppConnLoaderFactory.getAppConnLoader();
- private final Map appConns = new HashMap<>();
+ private final Map appConns = new ConcurrentHashMap<>();
private volatile boolean isLoaded = false;
private List appConnList = null;
AppConnInfoService appConnInfoService;
@@ -60,6 +61,8 @@ public abstract class AbstractAppConnManager implements AppConnManager {
private AppConnRefreshThread appConnRefreshThread;
private static volatile AppConnManager appConnManager;
+
+ private static volatile boolean appConnManagerInited = false;
private static boolean lazyLoad = false;
public static void setLazyLoad() {
@@ -67,11 +70,11 @@ public static void setLazyLoad() {
}
public static AppConnManager getAppConnManager() {
- if (appConnManager != null) {
+ if (appConnManagerInited) {
return appConnManager;
}
synchronized (AbstractAppConnManager.class) {
- if (appConnManager == null) {
+ if (!appConnManagerInited) {
//appconn-manager-core包无法引入manager-client包,会有maven循环依赖,这里通过反射获取client的实现类
//ismanager=false时,获取client端的AppConnManager实现类,ismanager=true时,获取appconn-framework端的AppConnManager实现类。
if (Objects.equals(AppConnManagerCoreConf.IS_APPCONN_MANAGER.getValue(), AppConnManagerCoreConf.hostname)
@@ -87,6 +90,7 @@ public static AppConnManager getAppConnManager() {
LOGGER.info("The instance of AppConnManager is {}.", appConnManager.getClass().getName());
appConnManager.init();
}
+ appConnManagerInited = true;
return appConnManager;
}
}
@@ -101,6 +105,7 @@ public void init() {
loadAppConns();
isLoaded = true;
}
+ LOGGER.info("AppConnManager init successfully");
}
protected abstract AppConnInfoService createAppConnInfoService();
@@ -117,9 +122,6 @@ protected void loadAppConns() {
LOGGER.warn("No AppConnInfos returned, ignore it.");
return;
}
- long refreshInterval = AppInstanceConstants.APP_CONN_REFRESH_INTERVAL.getValue().toLong();
- appConnRefreshThread = new AppConnRefreshThread(this, appConnInfos);
- Utils.defaultScheduler().scheduleAtFixedRate(appConnRefreshThread, refreshInterval, refreshInterval, TimeUnit.MILLISECONDS);
Map appConns = new HashMap<>();
Consumer loadAndAdd = DSSExceptionUtils.handling(appConnInfo -> {
AppConn appConn = loadAppConn(appConnInfo);
@@ -150,6 +152,9 @@ protected void loadAppConns() {
appConnList = Collections.unmodifiableList(new ArrayList<>(appConns.values()));
}
LOGGER.info("Inited all AppConns, the AppConn list are {}.", this.appConns.keySet());
+ long refreshInterval = AppInstanceConstants.APP_CONN_REFRESH_INTERVAL.getValue().toLong();
+ appConnRefreshThread = new AppConnRefreshThread(this, appConnInfos);
+ Utils.defaultScheduler().scheduleAtFixedRate(appConnRefreshThread, refreshInterval, refreshInterval, TimeUnit.MILLISECONDS);
}
protected AppConn loadAppConn(AppConnInfo appConnInfo) throws Exception {
diff --git a/dss-appconn/dss-appconn-manager/pom.xml b/dss-appconn/dss-appconn-manager/pom.xml
index e2b61cf83f..501acce760 100644
--- a/dss-appconn/dss-appconn-manager/pom.xml
+++ b/dss-appconn/dss-appconn-manager/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
diff --git a/dss-appconn/dss-scheduler-appconn/pom.xml b/dss-appconn/dss-scheduler-appconn/pom.xml
index cc14c6c04e..b760026009 100644
--- a/dss-appconn/dss-scheduler-appconn/pom.xml
+++ b/dss-appconn/dss-scheduler-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
diff --git a/dss-appconn/linkis-appconn-engineplugin/pom.xml b/dss-appconn/linkis-appconn-engineplugin/pom.xml
index 363d409dee..4f0be8c54f 100644
--- a/dss-appconn/linkis-appconn-engineplugin/pom.xml
+++ b/dss-appconn/linkis-appconn-engineplugin/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
diff --git a/dss-appconn/linkis-appconn-engineplugin/src/main/resources/linkis-engineconn.properties b/dss-appconn/linkis-appconn-engineplugin/src/main/resources/linkis-engineconn.properties
index 01b9fe8084..5e3cb3eab5 100644
--- a/dss-appconn/linkis-appconn-engineplugin/src/main/resources/linkis-engineconn.properties
+++ b/dss-appconn/linkis-appconn-engineplugin/src/main/resources/linkis-engineconn.properties
@@ -37,7 +37,7 @@ wds.linkis.server.mybatis.datasource.url=jdbc:mysql://127.0.0.1:3306/${databaseN
wds.linkis.server.mybatis.datasource.username=
-***REMOVED***
+wds.linkis.server.mybatis.datasource.password=
wds.linkis.gateway.ip=127.0.0.1
wds.linkis.gateway.port=9001
diff --git a/dss-appconn/linkis-appconn-engineplugin/src/main/scala/org/apache/linkis/manager/engineplugin/appconn/executor/AbstractExecutionRequestRefContext.scala b/dss-appconn/linkis-appconn-engineplugin/src/main/scala/org/apache/linkis/manager/engineplugin/appconn/executor/AbstractExecutionRequestRefContext.scala
index b631b447d5..3d91c60192 100644
--- a/dss-appconn/linkis-appconn-engineplugin/src/main/scala/org/apache/linkis/manager/engineplugin/appconn/executor/AbstractExecutionRequestRefContext.scala
+++ b/dss-appconn/linkis-appconn-engineplugin/src/main/scala/org/apache/linkis/manager/engineplugin/appconn/executor/AbstractExecutionRequestRefContext.scala
@@ -17,6 +17,7 @@
package org.apache.linkis.manager.engineplugin.appconn.executor
import java.util
+
import com.webank.wedatasphere.dss.standard.app.development.listener.core.ExecutionRequestRefContext
import com.webank.wedatasphere.dss.standard.app.development.listener.exception.AppConnExecutionErrorException
import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader, ResultSetWriter}
@@ -27,7 +28,7 @@ import org.apache.linkis.manager.engineplugin.appconn.conf.AppConnEngineConnConf
import org.apache.linkis.rpc.Sender
import org.apache.linkis.storage.FSFactory
import org.apache.linkis.storage.fs.FileSystem
-import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReaderFactory}
+import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader}
abstract class AbstractExecutionRequestRefContext(engineExecutorContext: EngineExecutionContext,
user: String,
@@ -69,10 +70,8 @@ abstract class AbstractExecutionRequestRefContext(engineExecutorContext: EngineE
resultSetAlias: String): ResultSetWriter[M, R] =
engineExecutorContext.createResultSetWriter(resultSet, resultSetAlias).asInstanceOf[ResultSetWriter[M, R]]
- override def getResultSetReader[M <: MetaData, R <: Record](fsPath: FsPath): ResultSetReader[M, R] = {
-
- ResultSetReaderFactory.getResultSetReader(fsPath.getSchemaPath).asInstanceOf[ResultSetReader[M, R]]
- }
+ override def getResultSetReader[M <: MetaData, R <: Record](fsPath: FsPath): ResultSetReader[M, R] =
+ ResultSetReader.getResultSetReader(fsPath.getSchemaPath).asInstanceOf[ResultSetReader[M, R]]
private def createResultSetWriter[M <: MetaData, R <: Record](resultSetType: String, resultSetAlias: String): ResultSetWriter[M, R] =
engineExecutorContext.createResultSetWriter(resultSetType, resultSetAlias).asInstanceOf[ResultSetWriter[M, R]]
diff --git a/dss-appconn/pom.xml b/dss-appconn/pom.xml
index 616ea2f666..bbdfbf351f 100644
--- a/dss-appconn/pom.xml
+++ b/dss-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../pom.xml
4.0.0
diff --git a/dss-apps/dss-apiservice-server/pom.xml b/dss-apps/dss-apiservice-server/pom.xml
index 0c82fbb7ec..25e955b29b 100644
--- a/dss-apps/dss-apiservice-server/pom.xml
+++ b/dss-apps/dss-apiservice-server/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.10.0-SNAPSHOT
../../pom.xml
4.0.0
@@ -140,11 +140,6 @@
-
- org.postgresql
- postgresql
- 42.3.3
-
org.apache.linkis
linkis-mybatis
@@ -348,6 +343,12 @@
provided
+
+ com.sun.jersey
+ jersey-core
+ 1.19.4
+
+
diff --git a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/config/ApiServiceConfiguration.java b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/config/ApiServiceConfiguration.java
index 969e055e7b..a4d4c9ac21 100644
--- a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/config/ApiServiceConfiguration.java
+++ b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/config/ApiServiceConfiguration.java
@@ -20,7 +20,7 @@
public class ApiServiceConfiguration {
- public final static CommonVars LINKIS_AUTHOR_USER_TOKEN = CommonVars.apply("wds.linkis.client.api.service.author.user.token","172.0.0.1");
+ public final static CommonVars LINKIS_AUTHOR_USER_TOKEN = CommonVars.apply("wds.linkis.client.api.service.author.user.token","");
public final static CommonVars LINKIS_ADMIN_USER = CommonVars.apply("wds.linkis.client.api.service.adminuser","ws");
public final static CommonVars LINKIS_CONNECTION_TIMEOUT = CommonVars.apply("wds.linkis.flow.connection.timeout",30000);
diff --git a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/execute/ExecuteCodeHelper.java b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/execute/ExecuteCodeHelper.java
index 19b0304775..8cac07324f 100644
--- a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/execute/ExecuteCodeHelper.java
+++ b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/execute/ExecuteCodeHelper.java
@@ -195,10 +195,11 @@ public static String getResultList(JobExecuteResult executeResult,UJESClient cl
- public static String getResultContent(String user, String path, int maxSize, UJESClient client) {
+ public static String getResultContent(String user, String path, int maxSize, UJESClient client, boolean enableLimit) {
return client.resultSet(ResultSetAction.builder()
.setPath(path)
.setUser(user)
+ .setEnableLimit(enableLimit)
.setPageSize(maxSize).build()).getResponseBody();
}
diff --git a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/restful/ApiServiceExecuteRestfulApi.java b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/restful/ApiServiceExecuteRestfulApi.java
index f6a66fc6ed..7a085b2efd 100644
--- a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/restful/ApiServiceExecuteRestfulApi.java
+++ b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/restful/ApiServiceExecuteRestfulApi.java
@@ -173,7 +173,8 @@ public Message openFile(HttpServletRequest req,
@RequestParam(required = false, name = "taskId") String taskId,
@RequestParam(required = false, name = "page", defaultValue = "1") Integer page,
@RequestParam(required = false, name = "pageSize", defaultValue = "5000") Integer pageSize,
- @RequestParam(required = false, name = "charset", defaultValue = "utf-8") String charset) {
+ @RequestParam(required = false, name = "charset", defaultValue = "utf-8") String charset,
+ @RequestParam(required = false, name = "enableLimit", defaultValue = "false") Boolean enableLimit) {
String userName = SecurityFilter.getLoginUsername(req);
logger.info("User {} wants to open resultSet file {} in task {}.", userName, path, taskId);
if (!isNumber(taskId)) {
@@ -191,7 +192,7 @@ public Message openFile(HttpServletRequest req,
} else if (userName.equals(apiServiceJob.getSubmitUser())) {
UJESClient client = LinkisJobSubmit.getClient();
try {
- String fileContent = ExecuteCodeHelper.getResultContent(apiServiceJob.getProxyUser(), path, pageSize, client);
+ String fileContent = ExecuteCodeHelper.getResultContent(apiServiceJob.getProxyUser(), path, pageSize, client,enableLimit);
return DSSCommonUtils.COMMON_GSON.fromJson(fileContent, Message.class);
} catch (Exception e) {
logger.error("User {} fetch resultSet {} failed.", userName, path, e);
diff --git a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceImpl.java b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceImpl.java
index f42bd1e142..8fc1584550 100644
--- a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceImpl.java
+++ b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceImpl.java
@@ -491,7 +491,7 @@ private Map uploadBml(String userName, String scriptPath, Map variableList=null;
if(metadata.entrySet().size() >0) {
Variable[] v = VariableParser.getVariables(metadata);
- variableList = Arrays.stream(v).filter(var -> !StringUtils.isEmpty(var.getValue())).collect(Collectors.toList());
+ variableList = Arrays.stream(v).filter(var -> !StringUtils.isEmpty(var.value())).collect(Collectors.toList());
}
if(variableList!=null) {
@@ -520,7 +520,7 @@ private Map updateBml(String userName, String resourceId, String
try {
ScriptFsWriter writer = StorageScriptFsWriter.getScriptFsWriter(new FsPath(scriptPath), Consts.UTF_8.toString(), null);
Variable[] v = VariableParser.getVariables(metadata);
- List variableList = Arrays.stream(v).filter(var -> !StringUtils.isEmpty(var.getValue())).collect(Collectors.toList());
+ List variableList = Arrays.stream(v).filter(var -> !StringUtils.isEmpty(var.value())).collect(Collectors.toList());
writer.addMetaData(new ScriptMetaData(variableList.toArray(new Variable[0])));
writer.addRecord(new ScriptRecord(scriptContent));
InputStream inputStream = writer.getInputStream();
diff --git a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceQueryServiceImpl.java b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceQueryServiceImpl.java
index 492b153fe7..436efbd034 100644
--- a/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceQueryServiceImpl.java
+++ b/dss-apps/dss-apiservice-server/src/main/java/com/webank/wedatasphere/dss/apiservice/core/service/impl/ApiServiceQueryServiceImpl.java
@@ -383,10 +383,9 @@ private Pair