diff --git a/README-ZH.md b/README-ZH.md
index 5a2eb02d29..0d04b86795 100644
--- a/README-ZH.md
+++ b/README-ZH.md
@@ -105,9 +105,7 @@
如果您想试用 Demo 环境,请加入DataSphere Studio社区用户群(**加群方式请翻到本文档末尾处**),联系 **WeDataSphere 入群机器人** 获取邀请码。
- DataSphereStudio Demo 环境用户注册页面:[点我进入](https://dss-open.wedatasphere.com/#/register)
-
- DataSphereStudio Demo 环境登录页面:[点我进入](https://dss-open.wedatasphere.com/#/login)
+ DataSphereStudio Demo 环境登录页面:[点我进入](http://106.52.77.156:8085/)
## 下载
@@ -138,7 +136,7 @@
- [DSS 的 Exchangis AppConn 插件安装指南](https://github.com/WeDataSphere/Exchangis/blob/master/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md)
-- [DSS 的 Streamis AppConn 插件安装指南](https://github.com/WeBankFinTech/Streamis/blob/main/docs/zh_CN/0.2.0/development/StreamisAppConn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
+- [DSS 的 Streamis AppConn 插件安装指南](https://github.com/WeBankFinTech/Streamis/blob/main/docs/zh_CN/%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3/AppConn%E7%9A%84%E4%BD%BF%E7%94%A8.md)
- [DSS 的 Prophecis AppConn 插件安装指南](https://github.com/WeBankFinTech/Prophecis/blob/master/docs/zh_CN/Deployment_Documents/Prophecis%20Appconn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
@@ -170,7 +168,7 @@
对 DSS 的任何问题和建议,敬请提交 issue,以便跟踪处理和经验沉淀共享。
- 您也可以扫描下面的二维码,加入我们的 微信/QQ群,以获得更快速的响应。
+ 您也可以扫描下面的二维码,加入我们的 微信群,以获得更快速的响应。

diff --git a/README.md b/README.md
index 2dd206c7ba..31bb3436b8 100644
--- a/README.md
+++ b/README.md
@@ -103,9 +103,7 @@ English | [中文](README-ZH.md)
If you want to try out the Demo environment, please join the DataSphere Studio community user group (**Please refer to the end of the document**), and contact **WeDataSphere Group Robot** to get an invitation code.
- DataSphereStudio Demo environment user registration page: [click me to enter](https://dss-open.wedatasphere.com/#/register)
-
- DataSphereStudio Demo environment login page: [click me to enter](https://dss-open.wedatasphere.com/#/login)
+ DataSphereStudio Demo environment login page: [click me to enter](http://106.52.77.156:8085/)
## Download
@@ -136,7 +134,7 @@ English | [中文](README-ZH.md)
- [Exchangis AppConn Plugin Installation Guide for DSS](https://github.com/WeDataSphere/Exchangis/blob/master/docs/en_US/ch1/exchangis_appconn_deploy_en.md)
-- [Streamis AppConn Plugin Installation Guide for DSS](https://github.com/WeBankFinTech/Streamis/blob/main/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md)
+- [Streamis AppConn Plugin Installation Guide for DSS](https://github.com/WeBankFinTech/Streamis/blob/main/docs/en_US/0.3.0/development/StreamisAppConnInstallationDocument.md)
- [Prophecis AppConn Plugin Installation Guide for DSS](https://github.com/WeBankFinTech/Prophecis/blob/master/docs/zh_CN/Deployment_Documents/Prophecis%20Appconn%E5%AE%89%E8%A3%85%E6%96%87%E6%A1%A3.md)
@@ -168,7 +166,7 @@ English | [中文](README-ZH.md)
For any questions or suggestions, please kindly submit an issue.
- You can scan the QR code below to join our WeChat and QQ group to get more immediate response.
+ You can scan the QR code below to join our WeChat to get more immediate response.

diff --git a/dss-appconn/appconns/dss-sparketl-appconn/src/main/icons/sparketl.icon b/dss-appconn/appconns/dss-sparketl-appconn/src/main/icons/sparketl.icon
new file mode 100644
index 0000000000..bf047fb7b2
--- /dev/null
+++ b/dss-appconn/appconns/dss-sparketl-appconn/src/main/icons/sparketl.icon
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/dss-appconn/appconns/dss-sparketl-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/sparketl/SparkEtlAppConn.java b/dss-appconn/appconns/dss-sparketl-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/sparketl/SparkEtlAppConn.java
index ce2c8e819d..1f6dfc3dfc 100644
--- a/dss-appconn/appconns/dss-sparketl-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/sparketl/SparkEtlAppConn.java
+++ b/dss-appconn/appconns/dss-sparketl-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/sparketl/SparkEtlAppConn.java
@@ -9,8 +9,6 @@
public class SparkEtlAppConn extends AbstractAppConn implements OnlyDevelopmentAppConn {
- public static final String SPARKETL_APPCONN_NAME = CommonVars.apply("wds.dss.appconn.sparketl.name", "sparketl").getValue();
-
private SparkEtlDevelopmentStandard developmentStandard;
@Override
@@ -23,7 +21,4 @@ public DevelopmentIntegrationStandard getOrCreateDevelopmentStandard() {
return developmentStandard;
}
-
-
-
}
diff --git a/dss-appconn/appconns/dss-sparketl-appconn/src/main/resources/init.sql b/dss-appconn/appconns/dss-sparketl-appconn/src/main/resources/init.sql
index e915f34fd8..00943e7c1b 100644
--- a/dss-appconn/appconns/dss-sparketl-appconn/src/main/resources/init.sql
+++ b/dss-appconn/appconns/dss-sparketl-appconn/src/main/resources/init.sql
@@ -1 +1,49 @@
-- 适用于第一次安装时
+select @sparketl_appconnId:=id from `dss_appconn` where `appconn_name` = 'sparketl';
+delete from `dss_appconn_instance` where `appconn_id` = @sparketl_appconnId;
+
+delete from dss_appconn where appconn_name = 'sparketl';
+INSERT INTO `dss_appconn` (`appconn_name`, `is_user_need_init`, `level`, `if_iframe`, `is_external`, `reference`, `class_name`, `appconn_class_path`, `resource`)
+VALUES ('sparketl', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.dss.appconn.sparketl.SparkEtlAppConn', 'DSS_INSTALL_HOME_VAL/dss-appconns/sparketl', '');
+
+select @sparketl_appconnId:=id from `dss_appconn` where `appconn_name` = 'sparketl';
+
+INSERT INTO `dss_appconn_instance` (`appconn_id`, `label`, `url`, `enhance_json`, `homepage_uri`)
+VALUES (@sparketl_appconnId, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', '#/sparketl');
+
+delete from dss_workflow_node where appconn_name = 'sparketl';
+insert into `dss_workflow_node` (`name`, `appconn_name`, `node_type`, `jump_type`, `support_jump`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `icon_path`)
+values('sparketl','sparketl','linkis.appconn.sparketl.bml2linkis','1','1','1','1','0','icons/sparketl.icon');
+
+select @sparketl_nodeId:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.sparketl.bml2linkis';
+
+delete from `dss_workflow_node_to_group` where `node_id`=@sparketl_nodeId;
+delete from `dss_workflow_node_to_ui` where `workflow_node_id`=@sparketl_nodeId;
+
+-- 查找节点所属组的id
+select @sparketl_node_groupId:=id from `dss_workflow_node_group` where `name` = '数据交换';
+
+INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values (@sparketl_nodeId, @sparketl_node_groupId);
+
+-- 考虑表中有的是重复记录,最好加上limit 1
+select @sparketl_node_ui_lable_name_1:=id from `dss_workflow_node_ui` where `lable_name` = '节点名' limit 1;
+select @sparketl_node_ui_lable_name_2:=id from `dss_workflow_node_ui` where `lable_name` = '节点描述' limit 1;
+select @sparketl_node_ui_lable_name_3:=id from `dss_workflow_node_ui` where `lable_name` = '业务标签' limit 1;
+select @sparketl_node_ui_lable_name_4:=id from `dss_workflow_node_ui` where `lable_name` = '应用标签' limit 1;
+select @sparketl_node_ui_lable_name_5:=id from `dss_workflow_node_ui` where `lable_name` = 'wds-linkis-yarnqueue' limit 1;
+select @sparketl_node_ui_lable_name_6:=id from `dss_workflow_node_ui` where `lable_name` = 'spark-driver-memory' limit 1;
+select @sparketl_node_ui_lable_name_7:=id from `dss_workflow_node_ui` where `lable_name` = 'spark-executor-memory' limit 1;
+select @sparketl_node_ui_lable_name_8:=id from `dss_workflow_node_ui` where `lable_name` = 'spark-executor-cores' limit 1;
+select @sparketl_node_ui_lable_name_9:=id from `dss_workflow_node_ui` where `lable_name` = 'spark-executor-instances' limit 1;
+select @sparketl_node_ui_lable_name_10:=id from `dss_workflow_node_ui` where `lable_name` = '是否复用引擎' limit 1;
+
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_1);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_2);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_3);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_4);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_5);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_6);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_7);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_8);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_9);
+INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@sparketl_nodeId, @sparketl_node_ui_lable_name_10);
diff --git a/dss-framework/dss-appconn-framework/src/main/java/com/webank/wedatasphere/dss/framework/appconn/restful/AppConn2LinkisRefExecutionRestfulApi.java b/dss-framework/dss-appconn-framework/src/main/java/com/webank/wedatasphere/dss/framework/appconn/restful/AppConn2LinkisRefExecutionRestfulApi.java
index e43c2440d7..b505761062 100644
--- a/dss-framework/dss-appconn-framework/src/main/java/com/webank/wedatasphere/dss/framework/appconn/restful/AppConn2LinkisRefExecutionRestfulApi.java
+++ b/dss-framework/dss-appconn-framework/src/main/java/com/webank/wedatasphere/dss/framework/appconn/restful/AppConn2LinkisRefExecutionRestfulApi.java
@@ -12,6 +12,7 @@
import com.webank.wedatasphere.dss.standard.app.development.utils.DevelopmentOperationUtils;
import com.webank.wedatasphere.dss.standard.app.sso.Workspace;
import com.webank.wedatasphere.dss.standard.common.desc.AppInstance;
+import com.webank.wedatasphere.dss.standard.common.desc.AppInstanceImpl;
import org.apache.linkis.server.Message;
import org.apache.linkis.server.security.SecurityFilter;
import org.slf4j.Logger;
@@ -33,7 +34,9 @@ public class AppConn2LinkisRefExecutionRestfulApi {
private static final Logger LOGGER = LoggerFactory.getLogger(AppConn2LinkisRefExecutionRestfulApi.class);
+ private AppInstance emptyAppInstance = new AppInstanceImpl();
+ // 仅仅用于表示依赖,希望AppConnManagerRestfulApi先完成所有AppConn的加载之后,本Restful再对外提供服务。
@Autowired
private AppConnManagerRestfulApi appConnManagerRestfulApi;
@@ -49,8 +52,10 @@ public Message execute(HttpServletRequest request, @RequestBody Map labels = Arrays.asList(new EnvDSSLabel(labelStr));
- if(appConn.getAppDesc().getAppInstances().size() == 1) {
- appInstance = appConn.getAppDesc().getAppInstances().get(0);
+ // 原则上,AppConn2Linkis类型的AppConn,要么一个AppInstance都没有,要么就需按照AppConn的规范去录入AppInstance.
+ if(appConn.getAppDesc().getAppInstances().isEmpty()) {
+ LOGGER.info("AppConn {} has no appInstance, use empty appInstance to get operation.", appConnName);
+ appInstance = emptyAppInstance;
} else {
appInstance = appConn.getAppDesc().getAppInstancesByLabels(labels).get(0);
}
@@ -61,6 +66,7 @@ public Message execute(HttpServletRequest request, @RequestBody Map)executionParams.get("params"));
+ Map params = (Map) executionParams.get("params");
+ if (!params.isEmpty()) {
+ linkisAppConnJob.getLogObj().info("add params from external resources: " + params);
+ // 为防止第三方传回 {"runtime":{},"startup":{},"variable":{},"special":{}},将原params已有的runtime、startup等属性的map覆盖为空
+ // 因此以下一个个去确认
+ Map runtimeMap = TaskUtils.getRuntimeMap(params);
+ if(!runtimeMap.isEmpty()) {
+ TaskUtils.addRuntimeMap(linkisAppConnJob.getParams(), runtimeMap);
+ }
+ Map specialMap = TaskUtils.getSpecialMap(params);
+ if(!specialMap.isEmpty()) {
+ TaskUtils.addSpecialMap(linkisAppConnJob.getParams(), specialMap);
+ }
+ Map startupMap = TaskUtils.getStartupMap(params);
+ if(!startupMap.isEmpty()) {
+ TaskUtils.addStartupMap(linkisAppConnJob.getParams(), startupMap);
+ }
+ Map variableMap = TaskUtils.getVariableMap(params);
+ if(!variableMap.isEmpty()) {
+ TaskUtils.addVariableMap(linkisAppConnJob.getParams(), variableMap);
+ }
}
}
dealExecutionParams(linkisAppConnJob, executionParams);
diff --git a/dss-orchestrator/orchestrators/dss-workflow/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/service/impl/BuildJobActionImpl.java b/dss-orchestrator/orchestrators/dss-workflow/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/service/impl/BuildJobActionImpl.java
index ddd6eefcb4..47a53cadd3 100644
--- a/dss-orchestrator/orchestrators/dss-workflow/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/service/impl/BuildJobActionImpl.java
+++ b/dss-orchestrator/orchestrators/dss-workflow/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/service/impl/BuildJobActionImpl.java
@@ -22,6 +22,7 @@
import com.webank.wedatasphere.dss.linkis.node.execution.job.Job;
import com.webank.wedatasphere.dss.linkis.node.execution.job.LinkisJob;
import com.webank.wedatasphere.dss.linkis.node.execution.service.BuildJobAction;
+import com.webank.wedatasphere.dss.linkis.node.execution.utils.LinkisJobExecutionUtils;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.linkis.manager.label.constant.LabelKeyConstant;
@@ -75,8 +76,7 @@ private String parseExecutionCode(Job job) {
String code = job.getCode();
logger.info("The parseExecutionCode0X code for the job is {}", code);
if (StringUtils.isEmpty(code) || code.equalsIgnoreCase("null")) {
- Gson gson = new Gson();
- code = gson.toJson(job.getParams());
+ code = LinkisJobExecutionUtils.gson.toJson(job.getParams());
logger.info("The executable code for the job is {}", code);
}
return code;
@@ -88,8 +88,7 @@ private String parseExecutionCodeFor1X(Job job) {
logger.info("The parseExecutionCodeFor1X code for the job is {}", code);
//for appconn node in subflow contains embeddedFlowId
if (StringUtils.isEmpty(code) || code.equalsIgnoreCase("null") || code.contains(EMBEDDED_FLOW_ID.getValue())) {
- Gson gson = new Gson();
- code = gson.toJson(job.getParams());
+ code = LinkisJobExecutionUtils.gson.toJson(job.getParams());
logger.info("The executable code for the job is {}", code);
}
return code;
diff --git a/dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/restful/FlowRestfulApi.java b/dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/restful/FlowRestfulApi.java
index 2e8fa292c0..156b181240 100644
--- a/dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/restful/FlowRestfulApi.java
+++ b/dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/restful/FlowRestfulApi.java
@@ -174,11 +174,11 @@ public Message publishWorkflow(@RequestBody PublishWorkflowRequest publishWorkfl
* @return
*/
@RequestMapping(value = "getReleaseStatus", method = RequestMethod.GET)
- public Message getReleaseStatus(@NotNull(message = "查询的发布id不能为空") @RequestParam(required = false, name = "releaseTaskId") Long releaseTaskId) {
+ public Message getReleaseStatus(@NotNull(message = "查询的发布id不能为空") @RequestParam(required = false, name = "releaseTaskId") String releaseTaskId) {
String username = SecurityFilter.getLoginUsername(httpServletRequest);
Message message;
try {
- ResponseConvertOrchestrator response = publishService.getStatus(username, releaseTaskId.toString());
+ ResponseConvertOrchestrator response = publishService.getStatus(username, releaseTaskId);
if (null != response.getResponse()) {
String status = response.getResponse().getJobStatus().toString();
status = StringUtils.isNotBlank(status) ? status.toLowerCase() : status;
diff --git a/dss-standard/development-standard/development-process-standard/src/main/java/com/webank/wedatasphere/dss/standard/app/development/utils/DevelopmentOperationUtils.java b/dss-standard/development-standard/development-process-standard/src/main/java/com/webank/wedatasphere/dss/standard/app/development/utils/DevelopmentOperationUtils.java
index 93ead04e9b..f3d4ed205c 100644
--- a/dss-standard/development-standard/development-process-standard/src/main/java/com/webank/wedatasphere/dss/standard/app/development/utils/DevelopmentOperationUtils.java
+++ b/dss-standard/development-standard/development-process-standard/src/main/java/com/webank/wedatasphere/dss/standard/app/development/utils/DevelopmentOperationUtils.java
@@ -91,7 +91,10 @@ public static V tryDeve
String error = String.format("%s failed. Caused by: %s.", errorMsg, ExceptionUtils.getRootCauseMessage(e));
throw new ExternalOperationFailedException(50010, error, e);
}
- if(responseRef.isFailed()) {
+ if(responseRef == null) {
+ LOGGER.error("{} failed. Caused by: empty responseRef returned by AppConn.", errorMsg);
+ throw new ExternalOperationFailedException(61123, errorMsg + " failed. Caused by: empty responseRef returned by AppConn.");
+ } else if(responseRef.isFailed()) {
LOGGER.error("{} failed. Caused by: {}.", errorMsg, responseRef.getErrorMsg());
DSSExceptionUtils.dealWarnException(61123,
String.format("%s failed. Caused by: %s.", errorMsg, responseRef.getErrorMsg()),
diff --git a/images/en_US/readme/communication.png b/images/en_US/readme/communication.png
index 12e86727da..cec5ef68ce 100644
Binary files a/images/en_US/readme/communication.png and b/images/en_US/readme/communication.png differ
diff --git a/images/zh_CN/readme/communication.png b/images/zh_CN/readme/communication.png
index 12e86727da..cec5ef68ce 100644
Binary files a/images/zh_CN/readme/communication.png and b/images/zh_CN/readme/communication.png differ